Skip to content

Commit 4233559

Browse files
committed
feat: add helpful import guards for all optional dependencies
1 parent 1eb5d48 commit 4233559

9 files changed

Lines changed: 92 additions & 43 deletions

File tree

cli/alora/intrinsic_uploader.py

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,14 @@
1212
import tempfile
1313
from typing import Literal
1414

15-
import git
16-
from huggingface_hub import HfFolder, RepoUrl, create_repo, upload_file, upload_folder
15+
try:
16+
import git
17+
from huggingface_hub import HfFolder, RepoUrl, create_repo, upload_file, upload_folder
18+
except ImportError as e:
19+
raise ImportError(
20+
"The 'm alora upload' command requires extra dependencies. "
21+
'Please install them with: pip install "mellea[hf]"'
22+
) from e
1723

1824

1925
def upload_intrinsic(

cli/alora/train.py

Lines changed: 19 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -12,19 +12,25 @@
1212
import sys
1313
import warnings
1414

15-
import torch
16-
import typer
17-
from datasets import Dataset
18-
from peft import LoraConfig, get_peft_model
19-
from transformers import (
20-
AutoModelForCausalLM,
21-
AutoTokenizer,
22-
TrainerCallback,
23-
TrainerControl,
24-
TrainerState,
25-
TrainingArguments,
26-
)
27-
from trl import DataCollatorForCompletionOnlyLM, SFTConfig, SFTTrainer
15+
try:
16+
import torch
17+
import typer
18+
from datasets import Dataset
19+
from peft import LoraConfig, get_peft_model
20+
from transformers import (
21+
AutoModelForCausalLM,
22+
AutoTokenizer,
23+
TrainerCallback,
24+
TrainerControl,
25+
TrainerState,
26+
TrainingArguments,
27+
)
28+
from trl import DataCollatorForCompletionOnlyLM, SFTConfig, SFTTrainer
29+
except ImportError as e:
30+
raise ImportError(
31+
"The 'm alora' command requires extra dependencies. "
32+
'Please install them with: pip install "mellea[hf]"'
33+
) from e
2834

2935
# Handle MPS with old PyTorch versions on macOS only
3036
# Accelerate's GradScaler requires PyTorch >= 2.8.0 for MPS

cli/alora/upload.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,13 @@
88

99
import os
1010

11-
from huggingface_hub import HfApi, HfFolder, create_repo, upload_folder
11+
try:
12+
from huggingface_hub import HfApi, HfFolder, create_repo, upload_folder
13+
except ImportError as e:
14+
raise ImportError(
15+
"The 'm alora upload' command requires extra dependencies. "
16+
'Please install them with: pip install "mellea[hf]"'
17+
) from e
1218

1319

1420
def upload_model(weight_path: str, model_name: str, private: bool = True):

cli/serve/app.py

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,10 +6,16 @@
66
import time
77
import uuid
88

9-
import typer
10-
import uvicorn
11-
from fastapi import FastAPI
12-
from fastapi.responses import JSONResponse
9+
try:
10+
import typer
11+
import uvicorn
12+
from fastapi import FastAPI
13+
from fastapi.responses import JSONResponse
14+
except ImportError as e:
15+
raise ImportError(
16+
"The 'm serve' command requires extra dependencies. "
17+
'Please install them with: pip install "mellea[server]"'
18+
) from e
1319

1420
from .models import (
1521
ChatCompletion,

mellea/backends/huggingface.py

Lines changed: 18 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -14,18 +14,24 @@
1414
from collections.abc import Callable, Coroutine, Sequence
1515
from typing import Any, overload
1616

17-
import llguidance
18-
import llguidance.hf
19-
import llguidance.torch
20-
import torch
21-
from transformers import AutoModelForCausalLM, AutoTokenizer
22-
from transformers.cache_utils import DynamicCache
23-
from transformers.generation.logits_process import LogitsProcessorList
24-
from transformers.generation.streamers import AsyncTextIteratorStreamer
25-
from transformers.generation.utils import GenerateDecoderOnlyOutput
26-
from transformers.modeling_utils import PreTrainedModel
27-
from transformers.tokenization_utils import PreTrainedTokenizer
28-
from transformers.trainer_utils import set_seed
17+
try:
18+
import llguidance
19+
import llguidance.hf
20+
import llguidance.torch
21+
import torch
22+
from transformers import AutoModelForCausalLM, AutoTokenizer
23+
from transformers.cache_utils import DynamicCache
24+
from transformers.generation.logits_process import LogitsProcessorList
25+
from transformers.generation.streamers import AsyncTextIteratorStreamer
26+
from transformers.generation.utils import GenerateDecoderOnlyOutput
27+
from transformers.modeling_utils import PreTrainedModel
28+
from transformers.tokenization_utils import PreTrainedTokenizer
29+
from transformers.trainer_utils import set_seed
30+
except ImportError as e:
31+
raise ImportError(
32+
"The HuggingFace backend requires extra dependencies. "
33+
'Please install them with: pip install "mellea[hf]"'
34+
) from e
2935

3036
from ..backends import kv_block_helpers
3137
from ..core import (

mellea/backends/kv_block_helpers.py

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,10 +11,16 @@
1111
from functools import reduce
1212
from typing import Any
1313

14-
import torch
15-
from transformers import PreTrainedModel
16-
from transformers.cache_utils import DynamicCache
17-
from transformers.tokenization_utils_base import BatchEncoding
14+
try:
15+
import torch
16+
from transformers import PreTrainedModel
17+
from transformers.cache_utils import DynamicCache
18+
from transformers.tokenization_utils_base import BatchEncoding
19+
except ImportError as e:
20+
raise ImportError(
21+
"The HuggingFace backend requires extra dependencies. "
22+
'Please install them with: pip install "mellea[hf]"'
23+
) from e
1824

1925
TokenizedCacheIterleaving = Iterable[BatchEncoding | DynamicCache]
2026
LegacyCache = Any

mellea/formatters/granite/retrievers/util.py

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,14 @@
1010
import zipfile
1111

1212
# Third Party
13-
import pyarrow as pa # type: ignore[import-not-found]
14-
import pyarrow.json as pj # type: ignore[import-not-found]
13+
try:
14+
import pyarrow as pa # type: ignore[import-not-found]
15+
import pyarrow.json as pj # type: ignore[import-not-found]
16+
except ImportError as e:
17+
raise ImportError(
18+
"The granite retrievers module requires extra dependencies. "
19+
'Please install them with: pip install "mellea[granite_retriever]"'
20+
) from e
1521

1622

1723
def download_mtrag_corpus(target_dir: str, corpus_name: str) -> pathlib.Path:

mellea/stdlib/components/docs/richdocument.py

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -13,11 +13,17 @@
1313
import io
1414
from pathlib import Path
1515

16-
from docling.datamodel.base_models import InputFormat
17-
from docling.datamodel.pipeline_options import PdfPipelineOptions
18-
from docling.document_converter import DocumentConverter, PdfFormatOption
19-
from docling_core.types.doc.document import DoclingDocument, TableItem
20-
from docling_core.types.io import DocumentStream
16+
try:
17+
from docling.datamodel.base_models import InputFormat
18+
from docling.datamodel.pipeline_options import PdfPipelineOptions
19+
from docling.document_converter import DocumentConverter, PdfFormatOption
20+
from docling_core.types.doc.document import DoclingDocument, TableItem
21+
from docling_core.types.io import DocumentStream
22+
except ImportError as e:
23+
raise ImportError(
24+
"RichDocument requires extra dependencies. "
25+
'Please install them with: pip install "mellea[docling]"'
26+
) from e
2127

2228
from ....backends.tools import MelleaTool
2329
from ....core import CBlock, Component, ModelOutputThunk, TemplateRepresentation

pyproject.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -97,6 +97,7 @@ cli = [
9797
server = [
9898
"uvicorn",
9999
"fastapi",
100+
"mellea[cli]",
100101
]
101102

102103
sandbox = [

0 commit comments

Comments (0)