
chore: fix linting/isort (#1215)
* fix linting

* vertex fix
anakin87 authored Nov 25, 2024
1 parent 6909e46 commit 7b9c8a6
Showing 25 changed files with 28 additions and 28 deletions.
@@ -4,4 +4,4 @@
 from .document_embedder import AmazonBedrockDocumentEmbedder
 from .text_embedder import AmazonBedrockTextEmbedder
 
-__all__ = ["AmazonBedrockTextEmbedder", "AmazonBedrockDocumentEmbedder"]
+__all__ = ["AmazonBedrockDocumentEmbedder", "AmazonBedrockTextEmbedder"]
@@ -236,7 +236,7 @@ def run(self, documents: List[Document]):
             - `documents`: The `Document`s with the `embedding` field populated.
         :raises AmazonBedrockInferenceError: If the inference fails.
         """
-        if not isinstance(documents, list) or documents and not isinstance(documents[0], Document):
+        if not isinstance(documents, list) or (documents and not isinstance(documents[0], Document)):
             msg = (
                 "AmazonBedrockDocumentEmbedder expects a list of Documents as input."
                 "In case you want to embed a string, please use the AmazonBedrockTextEmbedder."
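The other recurring change, shown above, wraps the second half of the input check in parentheses. This is purely a readability/lint fix: `and` binds more tightly than `or` in Python, so the grouping is identical with or without the parentheses. A small self-contained sketch (not from the repository) showing the two spellings are equivalent:

    # `a or b and c` already parses as `a or (b and c)`; the truth tables match.
    for a in (False, True):
        for b in (False, True):
            for c in (False, True):
                assert (a or b and c) == (a or (b and c))

The same parenthesization is applied to every embedder and ranker touched by this commit.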
@@ -4,4 +4,4 @@
 from .chat.chat_generator import AmazonBedrockChatGenerator
 from .generator import AmazonBedrockGenerator
 
-__all__ = ["AmazonBedrockGenerator", "AmazonBedrockChatGenerator"]
+__all__ = ["AmazonBedrockChatGenerator", "AmazonBedrockGenerator"]
@@ -5,4 +5,4 @@
 from .chat.vertex_chat_generator import AnthropicVertexChatGenerator
 from .generator import AnthropicGenerator
 
-__all__ = ["AnthropicGenerator", "AnthropicChatGenerator", "AnthropicVertexChatGenerator"]
+__all__ = ["AnthropicChatGenerator", "AnthropicGenerator", "AnthropicVertexChatGenerator"]
@@ -4,4 +4,4 @@
 from .document_store import DEFAULT_VECTOR_SEARCH, AzureAISearchDocumentStore
 from .filters import _normalize_filters
 
-__all__ = ["AzureAISearchDocumentStore", "DEFAULT_VECTOR_SEARCH", "_normalize_filters"]
+__all__ = ["DEFAULT_VECTOR_SEARCH", "AzureAISearchDocumentStore", "_normalize_filters"]
@@ -1,3 +1,3 @@
 from .retriever import ChromaEmbeddingRetriever, ChromaQueryTextRetriever
 
-__all__ = ["ChromaQueryTextRetriever", "ChromaEmbeddingRetriever"]
+__all__ = ["ChromaEmbeddingRetriever", "ChromaQueryTextRetriever"]
@@ -146,7 +146,7 @@ def run(self, documents: List[Document]):
             - `meta`: metadata about the embedding process.
         :raises TypeError: if the input is not a list of `Documents`.
         """
-        if not isinstance(documents, list) or documents and not isinstance(documents[0], Document):
+        if not isinstance(documents, list) or (documents and not isinstance(documents[0], Document)):
             msg = (
                 "CohereDocumentEmbedder expects a list of Documents as input."
                 "In case you want to embed a string, please use the CohereTextEmbedder."
@@ -4,4 +4,4 @@
 from .chat.chat_generator import CohereChatGenerator
 from .generator import CohereGenerator
 
-__all__ = ["CohereGenerator", "CohereChatGenerator"]
+__all__ = ["CohereChatGenerator", "CohereGenerator"]
@@ -8,7 +8,7 @@
 
 __all__ = [
     "FastembedDocumentEmbedder",
-    "FastembedTextEmbedder",
     "FastembedSparseDocumentEmbedder",
     "FastembedSparseTextEmbedder",
+    "FastembedTextEmbedder",
 ]
@@ -158,7 +158,7 @@ def run(self, documents: List[Document]):
         :returns: A dictionary with the following keys:
             - `documents`: List of Documents with each Document's `embedding` field set to the computed embeddings.
         """
-        if not isinstance(documents, list) or documents and not isinstance(documents[0], Document):
+        if not isinstance(documents, list) or (documents and not isinstance(documents[0], Document)):
             msg = (
                 "FastembedDocumentEmbedder expects a list of Documents as input. "
                 "In case you want to embed a list of strings, please use the FastembedTextEmbedder."
@@ -150,7 +150,7 @@ def run(self, documents: List[Document]):
             - `documents`: List of Documents with each Document's `sparse_embedding`
               field set to the computed embeddings.
         """
-        if not isinstance(documents, list) or documents and not isinstance(documents[0], Document):
+        if not isinstance(documents, list) or (documents and not isinstance(documents[0], Document)):
             msg = (
                 "FastembedSparseDocumentEmbedder expects a list of Documents as input. "
                 "In case you want to embed a list of strings, please use the FastembedTextEmbedder."
@@ -157,7 +157,7 @@ def run(self, query: str, documents: List[Document], top_k: Optional[int] = None
         :raises ValueError: If `top_k` is not > 0.
         """
-        if not isinstance(documents, list) or documents and not isinstance(documents[0], Document):
+        if not isinstance(documents, list) or (documents and not isinstance(documents[0], Document)):
             msg = "FastembedRanker expects a list of Documents as input. "
             raise TypeError(msg)
         if query == "":
@@ -4,4 +4,4 @@
 from .chat.gemini import GoogleAIGeminiChatGenerator
 from .gemini import GoogleAIGeminiGenerator
 
-__all__ = ["GoogleAIGeminiGenerator", "GoogleAIGeminiChatGenerator"]
+__all__ = ["GoogleAIGeminiChatGenerator", "GoogleAIGeminiGenerator"]
@@ -11,8 +11,8 @@
 
 __all__ = [
     "VertexAICodeGenerator",
-    "VertexAIGeminiGenerator",
     "VertexAIGeminiChatGenerator",
+    "VertexAIGeminiGenerator",
     "VertexAIImageCaptioner",
     "VertexAIImageGenerator",
    "VertexAIImageQA",
6 changes: 3 additions & 3 deletions integrations/google_vertex/tests/chat/test_gemini.py
@@ -161,13 +161,13 @@ def test_to_dict_with_params(_mock_vertexai_init, _mock_generative_model):
                 "name": "get_current_weather",
                 "description": "Get the current weather in a given location",
                 "parameters": {
-                    "type_": "OBJECT",
+                    "type": "OBJECT",
                     "properties": {
                         "location": {
-                            "type_": "STRING",
+                            "type": "STRING",
                             "description": "The city and state, e.g. San Francisco, CA",
                         },
-                        "unit": {"type_": "STRING", "enum": ["celsius", "fahrenheit"]},
+                        "unit": {"type": "STRING", "enum": ["celsius", "fahrenheit"]},
                     },
                     "required": ["location"],
                     "property_ordering": ["location", "unit"],
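The hunk above is the "vertex fix" from the commit message: the expected serialized tool schema in the test now uses the key "type" instead of "type_". A hypothetical, simplified view of the structure the test compares against (names and nesting taken from the diff; the variable name and surrounding test code are assumptions):

    expected_parameters = {
        "type": "OBJECT",  # previously "type_"
        "properties": {
            "location": {"type": "STRING", "description": "The city and state, e.g. San Francisco, CA"},
            "unit": {"type": "STRING", "enum": ["celsius", "fahrenheit"]},
        },
        "required": ["location"],
        "property_ordering": ["location", "unit"],
    }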
@@ -158,7 +158,7 @@ def run(self, documents: List[Document]):
         param documents: A list of Documents to embed.
         """
-        if not isinstance(documents, list) or documents and not isinstance(documents[0], Document):
+        if not isinstance(documents, list) or (documents and not isinstance(documents[0], Document)):
             msg = (
                 "InstructorDocumentEmbedder expects a list of Documents as input. "
                 "In case you want to embed a list of strings, please use the InstructorTextEmbedder."
@@ -200,7 +200,7 @@ def run(self, documents: List[Document]):
             - `meta`: A dictionary with metadata including the model name and usage statistics.
         :raises TypeError: If the input is not a list of Documents.
         """
-        if not isinstance(documents, list) or documents and not isinstance(documents[0], Document):
+        if not isinstance(documents, list) or (documents and not isinstance(documents[0], Document)):
             msg = (
                 "JinaDocumentEmbedder expects a list of Documents as input."
                 "In case you want to embed a string, please use the JinaTextEmbedder."
@@ -5,4 +5,4 @@
 from .chat.chat_generator import LlamaCppChatGenerator
 from .generator import LlamaCppGenerator
 
-__all__ = ["LlamaCppGenerator", "LlamaCppChatGenerator"]
+__all__ = ["LlamaCppChatGenerator", "LlamaCppGenerator"]
@@ -6,4 +6,4 @@
 from .text_embedder import NvidiaTextEmbedder
 from .truncate import EmbeddingTruncateMode
 
-__all__ = ["NvidiaDocumentEmbedder", "NvidiaTextEmbedder", "EmbeddingTruncateMode"]
+__all__ = ["EmbeddingTruncateMode", "NvidiaDocumentEmbedder", "NvidiaTextEmbedder"]
@@ -242,7 +242,7 @@ def run(self, documents: List[Document]):
         if not self._initialized:
             msg = "The embedding model has not been loaded. Please call warm_up() before running."
             raise RuntimeError(msg)
-        elif not isinstance(documents, list) or documents and not isinstance(documents[0], Document):
+        elif not isinstance(documents, list) or (documents and not isinstance(documents[0], Document)):
             msg = (
                 "NvidiaDocumentEmbedder expects a list of Documents as input."
                 "In case you want to embed a string, please use the NvidiaTextEmbedder."
@@ -5,4 +5,4 @@
 from .nim_backend import Model, NimBackend
 from .utils import is_hosted, url_validation
 
-__all__ = ["NimBackend", "Model", "is_hosted", "url_validation"]
+__all__ = ["Model", "NimBackend", "is_hosted", "url_validation"]
@@ -10,10 +10,10 @@
 
 __all__ = [
     "OptimumDocumentEmbedder",
-    "OptimumEmbedderOptimizationMode",
     "OptimumEmbedderOptimizationConfig",
+    "OptimumEmbedderOptimizationMode",
     "OptimumEmbedderPooling",
-    "OptimumEmbedderQuantizationMode",
     "OptimumEmbedderQuantizationConfig",
+    "OptimumEmbedderQuantizationMode",
     "OptimumTextEmbedder",
 ]
@@ -208,7 +208,7 @@ def run(self, documents: List[Document]):
         if not self._initialized:
             msg = "The embedding model has not been loaded. Please call warm_up() before running."
             raise RuntimeError(msg)
-        if not isinstance(documents, list) or documents and not isinstance(documents[0], Document):
+        if not isinstance(documents, list) or (documents and not isinstance(documents[0], Document)):
             msg = (
                 "OptimumDocumentEmbedder expects a list of Documents as input."
                 " In case you want to embed a string, please use the OptimumTextEmbedder."
@@ -4,4 +4,4 @@
 
 from .retriever import QdrantEmbeddingRetriever, QdrantHybridRetriever, QdrantSparseEmbeddingRetriever
 
-__all__ = ("QdrantEmbeddingRetriever", "QdrantSparseEmbeddingRetriever", "QdrantHybridRetriever")
+__all__ = ("QdrantEmbeddingRetriever", "QdrantHybridRetriever", "QdrantSparseEmbeddingRetriever")
@@ -5,10 +5,10 @@
 from .document_store import WeaviateDocumentStore
 
 __all__ = [
-    "WeaviateDocumentStore",
     "AuthApiKey",
    "AuthBearerToken",
    "AuthClientCredentials",
    "AuthClientPassword",
    "AuthCredentials",
+    "WeaviateDocumentStore",
 ]
