Commit 20023ed
fix format & lint
mattf committed Dec 17, 2024
1 parent d09cece commit 20023ed
Showing 2 changed files with 28 additions and 12 deletions.
8 changes: 4 additions & 4 deletions libs/ai-endpoints/langchain_nvidia_ai_endpoints/embeddings.py
@@ -1,4 +1,4 @@
-from typing import Any, List, Literal, Optional
+from typing import Any, Dict, List, Literal, Optional

from langchain_core.embeddings import Embeddings
from langchain_core.outputs.llm_result import LLMResult
@@ -74,8 +74,8 @@ def __init__(self, **kwargs: Any):
trucate (str): "NONE", "START", "END", truncate input text if it exceeds
the model's context length. Default is "NONE", which raises
an error if an input is too long.
-dimensions (int): The number of dimensions for the embeddings. This parameter
-is not supported by all models.
+dimensions (int): The number of dimensions for the embeddings. This
+parameter is not supported by all models.
API Key:
- The recommended way to provide the API key is through the `NVIDIA_API_KEY`
@@ -135,7 +135,7 @@ def _embed(
# truncate: "NONE" | "START" | "END" -- default "NONE", error raised if
# an input is too long
# dimensions: int -- not supported by all models
-payload = {
+payload: Dict[str, Any] = {
"input": texts,
"model": self.model,
"encoding_format": "float",
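The `payload: Dict[str, Any]` annotation is the kind of change a static type checker such as mypy typically forces when a key with a differently typed value (here the integer `dimensions`) is added to a dict after the literal is built. The commit message only says "fix format & lint", so the following is a minimal illustrative sketch of that pattern, not the library's actual code; `build_payload` is a hypothetical helper and only the key names mirror the diff above.

from typing import Any, Dict, List, Optional

def build_payload(texts: List[str], model: str, dimensions: Optional[int] = None) -> Dict[str, Any]:
    # Without the explicit Dict[str, Any] annotation, mypy infers a narrower value
    # type from the literal below and then rejects the int assigned afterwards.
    payload: Dict[str, Any] = {
        "input": texts,
        "model": model,
        "encoding_format": "float",
    }
    if dimensions is not None:
        # Optional key with an int value; only some models accept it.
        payload["dimensions"] = dimensions
    return payload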
32 changes: 24 additions & 8 deletions libs/ai-endpoints/tests/integration_tests/test_embeddings.py
@@ -98,7 +98,9 @@ def test_embed_documents_truncate(


@pytest.mark.parametrize("dimensions", [32, 64, 128, 2048])
-def test_embed_query_with_dimensions(embedding_model: str, mode: dict, dimensions: int) -> None:
+def test_embed_query_with_dimensions(
+embedding_model: str, mode: dict, dimensions: int
+) -> None:
if embedding_model != "nvidia/llama-3.2-nv-embedqa-1b-v2":
pytest.skip("Model does not support custom dimensions.")
query = "foo bar"
@@ -107,7 +109,9 @@ def test_embed_query_with_dimensions(embedding_model: str, mode: dict, dimension


@pytest.mark.parametrize("dimensions", [32, 64, 128, 2048])
-def test_embed_documents_with_dimensions(embedding_model: str, mode: dict, dimensions: int) -> None:
+def test_embed_documents_with_dimensions(
+embedding_model: str, mode: dict, dimensions: int
+) -> None:
if embedding_model != "nvidia/llama-3.2-nv-embedqa-1b-v2":
pytest.skip("Model does not support custom dimensions.")
documents = ["foo bar", "bar foo"]
@@ -118,7 +122,9 @@ def test_embed_documents_with_dimensions(embedding_model: str, mode: dict, dimen


@pytest.mark.parametrize("dimensions", [102400])
-def test_embed_query_with_large_dimensions(embedding_model: str, mode: dict, dimensions: int) -> None:
+def test_embed_query_with_large_dimensions(
+embedding_model: str, mode: dict, dimensions: int
+) -> None:
if embedding_model != "nvidia/llama-3.2-nv-embedqa-1b-v2":
pytest.skip("Model does not support custom dimensions.")
query = "foo bar"
@@ -127,7 +133,9 @@ def test_embed_query_with_large_dimensions(embedding_model: str, mode: dict, dim


@pytest.mark.parametrize("dimensions", [102400])
-def test_embed_documents_with_large_dimensions(embedding_model: str, mode: dict, dimensions: int) -> None:
+def test_embed_documents_with_large_dimensions(
+embedding_model: str, mode: dict, dimensions: int
+) -> None:
if embedding_model != "nvidia/llama-3.2-nv-embedqa-1b-v2":
pytest.skip("Model does not support custom dimensions.")
documents = ["foo bar", "bar foo"]
@@ -138,22 +146,30 @@ def test_embed_documents_with_large_dimensions(embedding_model: str, mode: dict,


@pytest.mark.parametrize("dimensions", [-1])
-def test_embed_query_invalid_dimensions(embedding_model: str, mode: dict, dimensions: int) -> None:
+def test_embed_query_invalid_dimensions(
+embedding_model: str, mode: dict, dimensions: int
+) -> None:
if embedding_model != "nvidia/llama-3.2-nv-embedqa-1b-v2":
pytest.skip("Model does not support custom dimensions.")
query = "foo bar"
with pytest.raises(Exception) as exc:
-NVIDIAEmbeddings(model=embedding_model, dimensions=dimensions, **mode).embed_query(query)
+NVIDIAEmbeddings(
+model=embedding_model, dimensions=dimensions, **mode
+).embed_query(query)
assert "400" in str(exc.value)


@pytest.mark.parametrize("dimensions", [-1])
-def test_embed_documents_invalid_dimensions(embedding_model: str, mode: dict, dimensions: int) -> None:
+def test_embed_documents_invalid_dimensions(
+embedding_model: str, mode: dict, dimensions: int
+) -> None:
if embedding_model != "nvidia/llama-3.2-nv-embedqa-1b-v2":
pytest.skip("Model does not support custom dimensions.")
documents = ["foo bar", "bar foo"]
with pytest.raises(Exception) as exc:
-NVIDIAEmbeddings(model=embedding_model, dimensions=dimensions, **mode).embed_documents(documents)
+NVIDIAEmbeddings(
+model=embedding_model, dimensions=dimensions, **mode
+).embed_documents(documents)
assert "400" in str(exc.value)


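As a usage note, the pattern these tests exercise looks roughly like the sketch below. The model name and the `dimensions` keyword come from the tests above; the expectation that the returned vector has exactly `dimensions` entries is not visible in this hunk and is an assumption, and running the snippet requires a valid `NVIDIA_API_KEY` in the environment plus network access.

from langchain_nvidia_ai_endpoints import NVIDIAEmbeddings

# Only certain models (e.g. nvidia/llama-3.2-nv-embedqa-1b-v2 in the tests above)
# accept a custom dimensions value; others reject it.
embedder = NVIDIAEmbeddings(model="nvidia/llama-3.2-nv-embedqa-1b-v2", dimensions=64)
vector = embedder.embed_query("foo bar")
print(len(vector))  # expected to be 64 if the endpoint honors the requested dimensions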
