diff --git a/.github/workflows/prepare_release.yml b/.github/workflows/prepare_release.yml
index 02c42f98..bbd05015 100644
--- a/.github/workflows/prepare_release.yml
+++ b/.github/workflows/prepare_release.yml
@@ -61,4 +61,4 @@ jobs:
           gh pr create -B main --title "$COMMIT_MESSAGE" \
             --body 'Update ${{ github.event.inputs.packageName }} version from ${{ steps.packages_update.outputs.old_version }} to ${{ steps.packages_update.outputs.new_version }}'
         env:
-          GH_TOKEN: ${{ secrets.GH_TOKEN }}
\ No newline at end of file
+          GH_TOKEN: ${{ secrets.GH_TOKEN }}
diff --git a/.github/workflows/push_release.yml b/.github/workflows/push_release.yml
index f0cb6224..716ab579 100644
--- a/.github/workflows/push_release.yml
+++ b/.github/workflows/push_release.yml
@@ -48,4 +48,4 @@ jobs:
         uv tool run twine upload dist/*
       env:
         TWINE_USERNAME: __token__
-        TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
\ No newline at end of file
+        TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
diff --git a/docs/how-to/document_search/distributed_ingestion.md b/docs/how-to/document_search/distributed_ingestion.md
index 63b49ebf..70abf8d2 100644
--- a/docs/how-to/document_search/distributed_ingestion.md
+++ b/docs/how-to/document_search/distributed_ingestion.md
@@ -49,7 +49,7 @@ job_id = client.submit_job(
     runtime_env={
         "working_dir": "./",
         "pip": [
-            "ragbits-core[litellm]",
+            "ragbits-core",
             "ragbits-document-search[distributed]"
         ]
     },
@@ -62,7 +62,7 @@ Ray Jobs is also available as CLI commands. You can submit a job using the follo
 ```bash
 ray job submit \
     --address http://<cluster-address>:8265 \
-    --runtime-env '{"pip": ["ragbits-core[litellm]", "ragbits-document-search[distributed]"]}'\
+    --runtime-env '{"pip": ["ragbits-core", "ragbits-document-search[distributed]"]}'\
     --working-dir . \
     -- python script.py
 ```
diff --git a/docs/quickstart/quickstart1_prompts.md b/docs/quickstart/quickstart1_prompts.md
index b20076ee..ea277c24 100644
--- a/docs/quickstart/quickstart1_prompts.md
+++ b/docs/quickstart/quickstart1_prompts.md
@@ -7,10 +7,10 @@ In this Quickstart guide, you will learn how to define a dynamic prompt in Ragbi
 To install Ragbits, run the following command in your terminal:
 
 ```bash
-pip install ragbits[litellm]
+pip install ragbits
 ```
 
-This command will install all the popular Ragbits packages, along with [LiteLLM](https://docs.litellm.ai/docs/), which we will use in this guide for communicating with LLM APIs.
+This command will install all the popular Ragbits packages.
 
 ## Defining a Static Prompt
 The most standard way to define a prompt in Ragbits is to create a class that inherits from the `Prompt` class and configure it by setting values for appropriate properties. Here is an example of a simple prompt that asks the model to write a song about Ragbits:
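Context for the quickstart change: the guide keeps working with the plain `pip install ragbits`, because `litellm` is promoted to a required dependency of `ragbits-core` further down in this diff. A minimal sketch of the static prompt class the guide goes on to describe (assuming `Prompt` is importable from `ragbits.core.prompt`, the package this diff also imports `ChatFormat` from):

```python
from ragbits.core.prompt import Prompt


class SongPrompt(Prompt):
    # A static prompt: a fixed user message, no input model needed.
    user_prompt = "Write a short song about the Ragbits library."
```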
diff --git a/examples/apps/documents_chat.py b/examples/apps/documents_chat.py
index 576d6e37..1f74eb0c 100644
--- a/examples/apps/documents_chat.py
+++ b/examples/apps/documents_chat.py
@@ -3,7 +3,7 @@
 # dependencies = [
 #     "gradio",
 #     "ragbits-document-search",
-#     "ragbits-core[chroma,litellm]",
+#     "ragbits-core[chroma]",
 # ]
 # ///
 from collections.abc import AsyncIterator
diff --git a/examples/core/llm.py b/examples/core/llm.py
index 91dfe2bf..a63fbf27 100644
--- a/examples/core/llm.py
+++ b/examples/core/llm.py
@@ -1,7 +1,7 @@
 # /// script
 # requires-python = ">=3.10"
 # dependencies = [
-#     "ragbits-core[litellm]",
+#     "ragbits-core",
 # ]
 # ///
 import asyncio
diff --git a/examples/document-search/basic.py b/examples/document-search/basic.py
index 0d3667f9..973a6375 100644
--- a/examples/document-search/basic.py
+++ b/examples/document-search/basic.py
@@ -26,7 +26,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search",
-#     "ragbits-core[litellm]",
+#     "ragbits-core",
 # ]
 # ///
 
diff --git a/examples/document-search/chroma.py b/examples/document-search/chroma.py
index bdf27dc5..7fa18480 100644
--- a/examples/document-search/chroma.py
+++ b/examples/document-search/chroma.py
@@ -27,7 +27,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search",
-#     "ragbits-core[chroma,litellm]",
+#     "ragbits-core[chroma]",
 # ]
 # ///
 
diff --git a/examples/document-search/chroma_otel.py b/examples/document-search/chroma_otel.py
index e5c28cb5..405ae0bb 100644
--- a/examples/document-search/chroma_otel.py
+++ b/examples/document-search/chroma_otel.py
@@ -45,7 +45,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search",
-#     "ragbits-core[chroma,litellm,otel]",
+#     "ragbits-core[chroma,otel]",
 # ]
 # ///
 
diff --git a/examples/document-search/distributed.py b/examples/document-search/distributed.py
index 82d14896..9a2b8065 100644
--- a/examples/document-search/distributed.py
+++ b/examples/document-search/distributed.py
@@ -25,7 +25,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search[distributed]",
-#     "ragbits-core[litellm]",
+#     "ragbits-core",
 # ]
 # ///
 
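The `# /// script` headers edited above are PEP 723 inline script metadata: a runner such as `uv run` installs the listed dependencies before executing the file, so dropping the `[litellm]` extra is transparent once `litellm` ships with `ragbits-core` itself. A sketch of the resulting header shape:

```python
# /// script
# requires-python = ">=3.10"
# dependencies = [
#     "ragbits-core",
# ]
# ///
# `uv run examples/core/llm.py` reads the block above, installs ragbits-core
# (which now pulls in litellm transitively), and only then runs the script body.
import asyncio
```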
diff --git a/examples/document-search/from_config.py b/examples/document-search/from_config.py
index 1b5912f8..0048fce6 100644
--- a/examples/document-search/from_config.py
+++ b/examples/document-search/from_config.py
@@ -24,7 +24,7 @@ class to rephrase the query.
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search",
-#     "ragbits-core[chroma,litellm]",
+#     "ragbits-core[chroma]",
 # ]
 # ///
 
diff --git a/examples/document-search/multimodal.py b/examples/document-search/multimodal.py
index 0e932bd5..a7c07b22 100644
--- a/examples/document-search/multimodal.py
+++ b/examples/document-search/multimodal.py
@@ -27,7 +27,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search",
-#     "ragbits-core[litellm]",
+#     "ragbits-core",
 # ]
 # ///
 import asyncio
diff --git a/examples/document-search/qdrant.py b/examples/document-search/qdrant.py
index b1ea998e..db8dfe8c 100644
--- a/examples/document-search/qdrant.py
+++ b/examples/document-search/qdrant.py
@@ -27,7 +27,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search",
-#     "ragbits-core[litellm,qdrant]",
+#     "ragbits-core[qdrant]",
 # ]
 # ///
 
diff --git a/examples/evaluation/document-search/evaluate.py b/examples/evaluation/document-search/evaluate.py
index 07a2996b..d421ce59 100644
--- a/examples/evaluation/document-search/evaluate.py
+++ b/examples/evaluation/document-search/evaluate.py
@@ -3,7 +3,7 @@
 # dependencies = [
 #     "ragbits-document-search",
 #     "ragbits-evaluate[relari]",
-#     "ragbits-core[litellm,chroma]",
+#     "ragbits-core[chroma]",
 # ]
 # ///
 import asyncio
diff --git a/examples/evaluation/document-search/ingest.py b/examples/evaluation/document-search/ingest.py
index 60758202..c40e01b5 100644
--- a/examples/evaluation/document-search/ingest.py
+++ b/examples/evaluation/document-search/ingest.py
@@ -2,7 +2,7 @@
 # requires-python = ">=3.10"
 # dependencies = [
 #     "ragbits-document-search[huggingface]",
-#     "ragbits-core[litellm,chroma]",
+#     "ragbits-core[chroma]",
 #     "hydra-core~=1.3.2",
 #     "unstructured[md]>=0.15.13",
 # ]
diff --git a/packages/ragbits-cli/src/ragbits/cli/__init__.py b/packages/ragbits-cli/src/ragbits/cli/__init__.py
index 52696360..2fb82540 100644
--- a/packages/ragbits-cli/src/ragbits/cli/__init__.py
+++ b/packages/ragbits-cli/src/ragbits/cli/__init__.py
@@ -1,12 +1,29 @@
 import importlib.util
 import pkgutil
 from pathlib import Path
+from typing import Annotated
 
-from typer import Typer
+import typer
 
 import ragbits
 
-app = Typer(no_args_is_help=True)
+from .app import CLI, OutputType
+
+app = CLI(no_args_is_help=True)
+
+
+@app.callback()
+def output_type(
+    # `OutputType.text.value` used as a workaround for the issue with `typer.Option` not accepting Enum values
+    output: Annotated[
+        OutputType, typer.Option("--output", "-o", help="Set the output type (text or json)")
+    ] = OutputType.text.value,  # type: ignore
+) -> None:
+    """Sets an output type for the CLI
+    Args:
+        output: type of output to be set
+    """
+    app.set_output_type(output_type=output)
 
 
 def main() -> None:
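The callback added above gives every `ragbits` subcommand a global `--output`/`-o` switch. For illustration, the same plumbing used programmatically (all names come from this diff):

```python
from ragbits.cli.app import CLI, OutputType

app = CLI(no_args_is_help=True)

# Equivalent of passing `--output json` on the command line: the Typer
# callback just forwards the parsed enum value into the app state.
app.set_output_type(OutputType.json)
```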
diff --git a/packages/ragbits-cli/src/ragbits/cli/app.py b/packages/ragbits-cli/src/ragbits/cli/app.py
new file mode 100644
index 00000000..37a29067
--- /dev/null
+++ b/packages/ragbits-cli/src/ragbits/cli/app.py
@@ -0,0 +1,76 @@
+import json
+from dataclasses import dataclass
+from enum import Enum
+from typing import Any
+
+import typer
+from pydantic import BaseModel
+from rich.console import Console
+from rich.table import Table
+
+
+class OutputType(Enum):
+    """Indicates a type of CLI output formatting"""
+
+    text = "text"
+    json = "json"
+
+
+@dataclass()
+class CliState:
+    """A dataclass describing CLI state"""
+
+    output_type: OutputType = OutputType.text
+
+
+class CLI(typer.Typer):
+    """A CLI class with output formatting"""
+
+    def __init__(self, *args: Any, **kwargs: Any):  # noqa: ANN401
+        super().__init__(*args, **kwargs)
+        self.state: CliState = CliState()
+        self.console: Console = Console()
+
+    def set_output_type(self, output_type: OutputType) -> None:
+        """
+        Set the output type in the app state
+        Args:
+            output_type: OutputType
+        """
+        self.state.output_type = output_type
+
+    def print_output(self, data: list[BaseModel] | BaseModel) -> None:
+        """
+        Process and display output based on the current state's output type.
+
+        Args:
+            data: a single pydantic model or a list of pydantic models representing the output of a CLI function
+        """
+        if isinstance(data, BaseModel):
+            data = [data]
+        if len(data) == 0:
+            self._print_empty_list()
+            return
+        first_el_instance = type(data[0])
+        if any(not isinstance(datapoint, first_el_instance) for datapoint in data):
+            raise ValueError("All the rows need to be of the same type")
+        data_dicts: list[dict] = [output.model_dump(mode="python") for output in data]
+        output_type = self.state.output_type
+        if output_type == OutputType.json:
+            print(json.dumps(data_dicts, indent=4))
+        elif output_type == OutputType.text:
+            table = Table(show_header=True, header_style="bold magenta")
+            properties = data[0].model_json_schema()["properties"]
+            for key in properties:
+                table.add_column(properties[key]["title"])
+            for row in data_dicts:
+                table.add_row(*[str(value) for value in row.values()])
+            self.console.print(table)
+        else:
+            raise ValueError(f"Output type: {output_type} not supported")
+
+    def _print_empty_list(self) -> None:
+        if self.state.output_type == OutputType.text:
+            print("Empty data list")
+        elif self.state.output_type == OutputType.json:
+            print(json.dumps([]))
diff --git a/packages/ragbits-core/pyproject.toml b/packages/ragbits-core/pyproject.toml
index cf2c3caa..fb3280ed 100644
--- a/packages/ragbits-core/pyproject.toml
+++ b/packages/ragbits-core/pyproject.toml
@@ -36,6 +36,7 @@ dependencies = [
     "pydantic>=2.9.1",
     "typer~=0.12.5",
     "tomli~=2.0.2",
+    "litellm~=1.46.0",
 ]
 
 [project.urls]
@@ -48,9 +49,6 @@ dependencies = [
 chroma = [
     "chromadb~=0.4.24",
 ]
-litellm = [
-    "litellm~=1.46.0",
-]
 local = [
     "torch~=2.2.1",
     "transformers~=4.44.2",
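To make the new formatting layer concrete, a small usage sketch of `CLI.print_output` from the file above (the `Package` model is hypothetical, invented for illustration):

```python
from pydantic import BaseModel

from ragbits.cli.app import CLI, OutputType


class Package(BaseModel):
    # Hypothetical row model; any pydantic model works.
    name: str
    version: str


app = CLI()
app.set_output_type(OutputType.json)
app.print_output([Package(name="ragbits-core", version="0.4.0")])
# Prints the rows as indented JSON; with OutputType.text it renders a rich
# table whose column headers come from the model's JSON-schema field titles.
```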
""" prompt = _render(prompt_path=prompt_path, payload=payload) - - pprint("[orange3]RENDERED PROMPT:") - pprint(prompt.chat) + response = LLMResponseCliOutput(question=prompt.chat) + app.print_output(response) @prompts_app.command(name="exec") def execute( @@ -92,11 +99,8 @@ def execute( raise ValueError("`llm_factory` must be provided") llm = get_llm_from_factory(llm_factory) - response = asyncio.run(llm.generate(prompt)) - - pprint("[orange3]QUESTION:") - pprint(prompt.chat) - pprint("[orange3]ANSWER:") - pprint(response) + llm_output = asyncio.run(llm.generate(prompt)) + response = LLMResponseCliOutput(question=prompt.chat, answer=llm_output) + app.print_output(response) app.add_typer(prompts_app, name="prompts", help="Commands for managing prompts") diff --git a/packages/ragbits-core/src/ragbits/core/embeddings/litellm.py b/packages/ragbits-core/src/ragbits/core/embeddings/litellm.py index ad2d0929..4dbea167 100644 --- a/packages/ragbits-core/src/ragbits/core/embeddings/litellm.py +++ b/packages/ragbits-core/src/ragbits/core/embeddings/litellm.py @@ -1,9 +1,4 @@ -try: - import litellm - - HAS_LITELLM = True -except ImportError: - HAS_LITELLM = False +import litellm from ragbits.core.audit import trace from ragbits.core.embeddings import Embeddings @@ -40,13 +35,7 @@ def __init__( for more information, follow the instructions for your specific vendor in the\ [LiteLLM documentation](https://docs.litellm.ai/docs/embedding/supported_embedding). api_version: The API version for the call. - - Raises: - ImportError: If the 'litellm' extra requirements are not installed. """ - if not HAS_LITELLM: - raise ImportError("You need to install the 'litellm' extra requirements to use LiteLLM embeddings models") - super().__init__() self.model = model self.options = options or {} diff --git a/packages/ragbits-core/src/ragbits/core/llms/__init__.py b/packages/ragbits-core/src/ragbits/core/llms/__init__.py index b9c625e7..bf2ed301 100644 --- a/packages/ragbits-core/src/ragbits/core/llms/__init__.py +++ b/packages/ragbits-core/src/ragbits/core/llms/__init__.py @@ -3,8 +3,9 @@ from ragbits.core.utils.config_handling import get_cls_from_config from .base import LLM +from .litellm import LiteLLM -__all__ = ["LLM"] +__all__ = ["LLM", "LiteLLM"] module = sys.modules[__name__] diff --git a/packages/ragbits-core/src/ragbits/core/llms/clients/litellm.py b/packages/ragbits-core/src/ragbits/core/llms/clients/litellm.py index 7c5bfd04..a5915d29 100644 --- a/packages/ragbits-core/src/ragbits/core/llms/clients/litellm.py +++ b/packages/ragbits-core/src/ragbits/core/llms/clients/litellm.py @@ -1,17 +1,10 @@ from collections.abc import AsyncGenerator from dataclasses import dataclass +import litellm +from litellm.utils import CustomStreamWrapper, ModelResponse from pydantic import BaseModel -try: - import litellm - from litellm.utils import CustomStreamWrapper, ModelResponse - - HAS_LITELLM = True -except ImportError: - HAS_LITELLM = False - - from ragbits.core.audit import trace from ragbits.core.prompt import ChatFormat @@ -64,13 +57,7 @@ def __init__( api_key: API key used to authenticate with the LLM API. api_version: API version of the LLM API. use_structured_output: Whether to request a structured output from the model. Default is False. - - Raises: - ImportError: If the 'litellm' extra requirements are not installed. 
""" - if not HAS_LITELLM: - raise ImportError("You need to install the 'litellm' extra requirements to use LiteLLM models") - super().__init__(model_name) self.base_url = base_url self.api_key = api_key @@ -181,7 +168,7 @@ async def _get_litellm_response( options: LiteLLMOptions, response_format: type[BaseModel] | dict | None, stream: bool = False, - ) -> "ModelResponse | CustomStreamWrapper": + ) -> ModelResponse | CustomStreamWrapper: try: response = await litellm.acompletion( messages=conversation, diff --git a/packages/ragbits-core/src/ragbits/core/llms/litellm.py b/packages/ragbits-core/src/ragbits/core/llms/litellm.py index e50e26ce..13c1ebe7 100644 --- a/packages/ragbits-core/src/ragbits/core/llms/litellm.py +++ b/packages/ragbits-core/src/ragbits/core/llms/litellm.py @@ -2,12 +2,7 @@ import warnings from functools import cached_property -try: - import litellm - - HAS_LITELLM = True -except ImportError: - HAS_LITELLM = False +import litellm from ragbits.core.prompt.base import BasePrompt, ChatFormat @@ -47,13 +42,7 @@ def __init__( use_structured_output: Whether to request a [structured output](https://docs.litellm.ai/docs/completion/json_mode#pass-in-json_schema) from the model. Default is False. Can only be combined with models that support structured output. - - Raises: - ImportError: If the 'litellm' extra requirements are not installed. """ - if not HAS_LITELLM: - raise ImportError("You need to install the 'litellm' extra requirements to use LiteLLM models") - super().__init__(model_name, default_options) self.base_url = base_url self.api_key = api_key diff --git a/packages/ragbits-document-search/pyproject.toml b/packages/ragbits-document-search/pyproject.toml index b79b9ee7..df3a12db 100644 --- a/packages/ragbits-document-search/pyproject.toml +++ b/packages/ragbits-document-search/pyproject.toml @@ -57,7 +57,7 @@ dev-dependencies = [ "pytest-cov~=5.0.0", "pytest-asyncio~=0.24.0", "pip-licenses>=4.0.0,<5.0.0", - "ragbits[litellm,local]" + "ragbits[local]" ] [tool.uv.sources] diff --git a/packages/ragbits/pyproject.toml b/packages/ragbits/pyproject.toml index 1bfd9291..ae6b7f46 100644 --- a/packages/ragbits/pyproject.toml +++ b/packages/ragbits/pyproject.toml @@ -52,9 +52,6 @@ gcs = [ lab = [ "gradio~=4.44.0", ] -litellm = [ - "litellm~=1.46.0", -] local = [ "torch~=2.2.1", "transformers~=4.44.2", diff --git a/pyproject.toml b/pyproject.toml index c15c0f82..25d92cd2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ readme = "README.md" requires-python = ">=3.10" dependencies = [ "ragbits-cli", - "ragbits-core[chroma,lab,litellm,local,otel,qdrant]", + "ragbits-core[chroma,lab,local,otel,qdrant]", "ragbits-document-search[gcs,huggingface,distributed]", "ragbits-evaluate[relari]", "ragbits-guardrails[openai]", diff --git a/uv.lock b/uv.lock index fb7c5b47..8c6e5972 100644 --- a/uv.lock +++ b/uv.lock @@ -2514,6 +2514,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/bc/f7/7ec7fddc92e50714ea3745631f79bd9c96424cb2702632521028e57d3a36/multiprocess-0.70.16-py310-none-any.whl", hash = "sha256:c4a9944c67bd49f823687463660a2d6daae94c289adff97e0f9d696ba6371d02", size = 134824 }, { url = "https://files.pythonhosted.org/packages/50/15/b56e50e8debaf439f44befec5b2af11db85f6e0f344c3113ae0be0593a91/multiprocess-0.70.16-py311-none-any.whl", hash = "sha256:af4cabb0dac72abfb1e794fa7855c325fd2b55a10a44628a3c1ad3311c04127a", size = 143519 }, { url = 
"https://files.pythonhosted.org/packages/0a/7d/a988f258104dcd2ccf1ed40fdc97e26c4ac351eeaf81d76e266c52d84e2f/multiprocess-0.70.16-py312-none-any.whl", hash = "sha256:fc0544c531920dde3b00c29863377f87e1632601092ea2daca74e4beb40faa2e", size = 146741 }, + { url = "https://files.pythonhosted.org/packages/ea/89/38df130f2c799090c978b366cfdf5b96d08de5b29a4a293df7f7429fa50b/multiprocess-0.70.16-py38-none-any.whl", hash = "sha256:a71d82033454891091a226dfc319d0cfa8019a4e888ef9ca910372a446de4435", size = 132628 }, + { url = "https://files.pythonhosted.org/packages/da/d9/f7f9379981e39b8c2511c9e0326d212accacb82f12fbfdc1aa2ce2a7b2b6/multiprocess-0.70.16-py39-none-any.whl", hash = "sha256:a0bafd3ae1b732eac64be2e72038231c1ba97724b60b09400d68f229fcc2fbf3", size = 133351 }, ] [[package]] @@ -3321,8 +3323,6 @@ version = "6.0.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/18/c7/8c6872f7372eb6a6b2e4708b88419fb46b857f7a2e1892966b851cc79fc9/psutil-6.0.0.tar.gz", hash = "sha256:8faae4f310b6d969fa26ca0545338b21f73c6b15db7c4a8d934a5482faa818f2", size = 508067 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/66/78c9c3020f573c58101dc43a44f6855d01bbbd747e24da2f0c4491200ea3/psutil-6.0.0-cp27-none-win32.whl", hash = "sha256:02b69001f44cc73c1c5279d02b30a817e339ceb258ad75997325e0e6169d8b35", size = 249766 }, - { url = "https://files.pythonhosted.org/packages/e1/3f/2403aa9558bea4d3854b0e5e567bc3dd8e9fbc1fc4453c0aa9aafeb75467/psutil-6.0.0-cp27-none-win_amd64.whl", hash = "sha256:21f1fb635deccd510f69f485b87433460a603919b45e2a324ad65b0cc74f8fb1", size = 253024 }, { url = "https://files.pythonhosted.org/packages/0b/37/f8da2fbd29690b3557cca414c1949f92162981920699cd62095a984983bf/psutil-6.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:c588a7e9b1173b6e866756dde596fd4cad94f9399daf99ad8c3258b3cb2b47a0", size = 250961 }, { url = "https://files.pythonhosted.org/packages/35/56/72f86175e81c656a01c4401cd3b1c923f891b31fbcebe98985894176d7c9/psutil-6.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed2440ada7ef7d0d608f20ad89a04ec47d2d3ab7190896cd62ca5fc4fe08bf0", size = 287478 }, { url = "https://files.pythonhosted.org/packages/19/74/f59e7e0d392bc1070e9a70e2f9190d652487ac115bb16e2eff6b22ad1d24/psutil-6.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd9a97c8e94059b0ef54a7d4baf13b405011176c3b6ff257c247cae0d560ecd", size = 290455 }, @@ -3824,6 +3824,7 @@ version = "0.4.0" source = { editable = "packages/ragbits-core" } dependencies = [ { name = "jinja2" }, + { name = "litellm" }, { name = "pydantic" }, { name = "tomli" }, { name = "typer" }, @@ -3836,9 +3837,6 @@ chroma = [ lab = [ { name = "gradio" }, ] -litellm = [ - { name = "litellm" }, -] local = [ { name = "numpy" }, { name = "torch" }, @@ -3868,7 +3866,7 @@ requires-dist = [ { name = "chromadb", marker = "extra == 'chroma'", specifier = "~=0.4.24" }, { name = "gradio", marker = "extra == 'lab'", specifier = "~=4.44.0" }, { name = "jinja2", specifier = ">=3.1.4" }, - { name = "litellm", marker = "extra == 'litellm'", specifier = "~=1.46.0" }, + { name = "litellm", specifier = "~=1.46.0" }, { name = "numpy", marker = "extra == 'local'", specifier = "~=1.26.0" }, { name = "opentelemetry-api", marker = "extra == 'otel'", specifier = "~=1.27.0" }, { name = "pydantic", specifier = ">=2.9.1" }, @@ -3939,7 +3937,7 @@ dev = [ { name = "pytest", specifier = "~=8.3.3" }, { 
name = "pytest-asyncio", specifier = "~=0.24.0" }, { name = "pytest-cov", specifier = "~=5.0.0" }, - { name = "ragbits", extras = ["litellm", "local"] }, + { name = "ragbits", extras = ["local"] }, ] [[package]] @@ -4030,7 +4028,7 @@ version = "0.1.0" source = { virtual = "." } dependencies = [ { name = "ragbits-cli" }, - { name = "ragbits-core", extra = ["chroma", "lab", "litellm", "local", "otel", "qdrant"] }, + { name = "ragbits-core", extra = ["chroma", "lab", "local", "otel", "qdrant"] }, { name = "ragbits-document-search", extra = ["distributed", "gcs", "huggingface"] }, { name = "ragbits-evaluate", extra = ["relari"] }, { name = "ragbits-guardrails", extra = ["openai"] }, @@ -4058,7 +4056,7 @@ dev = [ [package.metadata] requires-dist = [ { name = "ragbits-cli", editable = "packages/ragbits-cli" }, - { name = "ragbits-core", extras = ["chroma", "lab", "litellm", "local", "otel", "qdrant"], editable = "packages/ragbits-core" }, + { name = "ragbits-core", extras = ["chroma", "lab", "local", "otel", "qdrant"], editable = "packages/ragbits-core" }, { name = "ragbits-document-search", extras = ["gcs", "huggingface", "distributed"], editable = "packages/ragbits-document-search" }, { name = "ragbits-evaluate", extras = ["relari"], editable = "packages/ragbits-evaluate" }, { name = "ragbits-guardrails", extras = ["openai"], editable = "packages/ragbits-guardrails" },