Commit c64280b: fixed deps
lkuligin committed Mar 19, 2024
1 parent 62f13d6 commit c64280b
Showing 12 changed files with 688 additions and 30 deletions.
8 changes: 4 additions & 4 deletions libs/community/Makefile
@@ -9,7 +9,7 @@ TEST_FILE ?= tests/unit_tests/
integration_test integration_tests: TEST_FILE = tests/integration_tests/

test tests integration_test integration_tests:
- poetry run pytest --release $(TEST_FILE)
+ poetry run pytest $(TEST_FILE)


######################
@@ -20,8 +20,8 @@ test tests integration_test integration_tests:
PYTHON_FILES=.
MYPY_CACHE=.mypy_cache
lint format: PYTHON_FILES=.
- lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/tools --name-only --diff-filter=d main | grep -E '\.py$$|\.ipynb$$')
- lint_package: PYTHON_FILES=langchain_google_tools
+ lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/community --name-only --diff-filter=d main | grep -E '\.py$$|\.ipynb$$')
+ lint_package: PYTHON_FILES=langchain_google_community
lint_tests: PYTHON_FILES=tests
lint_tests: MYPY_CACHE=.mypy_cache_test

@@ -41,7 +41,7 @@ spell_check:
spell_fix:
poetry run codespell --toml pyproject.toml -w

- check_imports: $(shell find langchain_google_tools -name '*.py')
+ check_imports: $(shell find langchain_google_community -name '*.py')
poetry run python ./scripts/check_imports.py $^

######################
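The `check_imports` target above now globs `langchain_google_community` instead of `langchain_google_tools` and passes every `.py` file it finds to `scripts/check_imports.py`. That script is not shown in this diff; the following is only a minimal sketch of what such an import smoke-check commonly looks like, with the exact behaviour and error handling assumed rather than taken from the repository:

```python
# Hypothetical sketch of an import smoke-check like scripts/check_imports.py.
# It is NOT copied from this repository: it simply tries to load every file
# passed on the command line and exits non-zero if any of them fails to import.
import importlib.util
import sys
import traceback

if __name__ == "__main__":
    failed = False
    for path in sys.argv[1:]:
        try:
            spec = importlib.util.spec_from_file_location("module_under_check", path)
            assert spec is not None and spec.loader is not None
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)  # raises if the module cannot be imported
        except Exception:
            failed = True
            print(f"Failed to import {path}:")
            traceback.print_exc()
    sys.exit(1 if failed else 0)
```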
@@ -11,6 +11,7 @@
from threading import Lock, Thread
from typing import Any, Callable, Dict, List, Optional, Tuple, Type

+ import numpy as np
from google.api_core.exceptions import ClientError
from langchain_community.utils.google import get_client_info
from langchain_community.vectorstores.utils import (
@@ -87,7 +88,7 @@ def __init__(
to use. Defaults to None.
"""
try:
- from google.cloud import bigquery
+ from google.cloud import bigquery # type: ignore[import-untyped]

client_info = get_client_info(module="bigquery-vector-search")
self.bq_client = bigquery.Client(
@@ -101,13 +102,6 @@ def __init__(
"Please, install or upgrade the google-cloud-bigquery library: "
"pip install google-cloud-bigquery"
)
- try:
-     import numpy as np
- except ModuleNotFoundError:
-     raise ImportError(
-         "Please, install or upgrade the numpy library: "
-         "pip install numpy"
-     )
self._logger = logging.getLogger(__name__)
self._creating_index = False
self._have_index = False
@@ -132,7 +126,7 @@ def __init__(

def _initialize_table(self) -> Any:
"""Validates or creates the BigQuery table."""
- from google.cloud import bigquery
+ from google.cloud import bigquery # type: ignore[import-untyped]

table_ref = bigquery.TableReference.from_string(self._full_table_id)
table = self.bq_client.create_table(table_ref, exists_ok=True)
@@ -198,7 +192,7 @@ def _initialize_vector_index(self) -> Any:
A vector index in BigQuery table enables efficient
approximate vector search.
"""
- from google.cloud import bigquery
+ from google.cloud import bigquery # type: ignore[import-untyped]

if self._have_index or self._creating_index:
# Already have an index or in the process of creating one.
@@ -270,7 +264,7 @@ def _create_index(self): # type: ignore[no-untyped-def]

def _persist(self, data: Dict[str, Any]) -> None:
"""Saves documents and embeddings to BigQuery."""
- from google.cloud import bigquery
+ from google.cloud import bigquery # type: ignore[import-untyped]

data_len = len(data[list(data.keys())[0]])
if data_len == 0:
@@ -363,7 +357,7 @@ def get_documents(
List of ids from adding the texts into the vectorstore.
"""
if ids and len(ids) > 0:
- from google.cloud import bigquery
+ from google.cloud import bigquery # type: ignore[import-untyped]

job_config = bigquery.QueryJobConfig(
query_parameters=[
@@ -430,7 +424,7 @@ def delete(self, ids: Optional[List[str]] = None, **kwargs: Any) -> Optional[boo
"""
if not ids or len(ids) == 0:
return True
- from google.cloud import bigquery
+ from google.cloud import bigquery # type: ignore[import-untyped]

job_config = bigquery.QueryJobConfig(
query_parameters=[
@@ -471,7 +465,7 @@ def _search_with_score_and_embeddings_by_vector(
brute_force: bool = False,
fraction_lists_to_search: Optional[float] = None,
) -> List[Tuple[Document, List[float], float]]:
- from google.cloud import bigquery
+ from google.cloud import bigquery # type: ignore[import-untyped]

# Create an index if no index exists.
if not self._have_index and not self._creating_index:
@@ -735,8 +729,6 @@ def max_marginal_relevance_search(
Returns:
List of Documents selected by maximal marginal relevance.
"""
- import numpy as np
-
query_embedding = self.embedding_model.embed_query( # type: ignore
query
)
@@ -785,8 +777,6 @@ def max_marginal_relevance_search_by_vector(
Returns:
List of Documents selected by maximal marginal relevance.
"""
- import numpy as np
-
doc_tuples = self._search_with_score_and_embeddings_by_vector(
embedding, fetch_k, filter, brute_force, fraction_lists_to_search
)
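Two changes repeat throughout this BigQuery vector-store module: `numpy` moves from lazy in-function imports to a single module-level import, and every deferred `from google.cloud import bigquery` gains a `# type: ignore[import-untyped]` comment so mypy accepts the untyped package. A condensed sketch of the resulting pattern (class name, message text, and method body are illustrative only, not verbatim from the file):

```python
import numpy as np  # now a hard dependency, imported once at module level


class BigQueryVectorSearchSketch:
    """Illustration of the guarded-import pattern used in the module above."""

    def __init__(self) -> None:
        try:
            # Deferred import keeps google-cloud-bigquery optional at import time;
            # the comment silences mypy's "import-untyped" error for the package.
            from google.cloud import bigquery  # type: ignore[import-untyped]
        except ModuleNotFoundError:
            raise ImportError(
                "Please, install or upgrade the google-cloud-bigquery library: "
                "pip install google-cloud-bigquery"
            )
        self.bq_client = bigquery.Client()

    def _cosine_scores(self, query: list, candidates: list) -> "np.ndarray":
        # numpy no longer needs a per-method import here.
        q = np.asarray(query, dtype=float)
        c = np.asarray(candidates, dtype=float)
        return c @ q / (np.linalg.norm(c, axis=1) * np.linalg.norm(q) + 1e-12)
```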
@@ -9,12 +9,12 @@
from langchain_core.utils import get_from_dict_or_env

if TYPE_CHECKING:
- from google.cloud.contentwarehouse_v1 import (
+ from google.cloud.contentwarehouse_v1 import ( # type: ignore[import-untyped]
DocumentServiceClient,
RequestMetadata,
SearchDocumentsRequest,
)
- from google.cloud.contentwarehouse_v1.services.document_service.pagers import (
+ from google.cloud.contentwarehouse_v1.services.document_service.pagers import ( # type: ignore[import-untyped]
SearchDocumentsPager,
)

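Both hunks above only change imports inside the `if TYPE_CHECKING:` block, which mypy evaluates but the interpreter never executes, so the added `# type: ignore[import-untyped]` comments affect static analysis only. A minimal sketch of that pattern, with the function below invented purely for illustration:

```python
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    # Imported only for type checking; at runtime TYPE_CHECKING is False,
    # so the untyped google.cloud package is not required to import this module.
    from google.cloud.contentwarehouse_v1 import (  # type: ignore[import-untyped]
        DocumentServiceClient,
    )


def run_search(client: "DocumentServiceClient", request: Any) -> Any:
    # The string annotation defers evaluation, so no runtime import is needed.
    return client.search_documents(request=request)
```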
2 changes: 1 addition & 1 deletion libs/community/langchain_google_community/gmail/loader.py
@@ -2,7 +2,7 @@
import re
from typing import Any, Iterator

- from googleapiclient.discovery import build
+ from googleapiclient.discovery import build # type: ignore[import-untyped]
from langchain_community.chat_loaders.base import BaseChatLoader
from langchain_core.chat_sessions import ChatSession
from langchain_core.messages import HumanMessage
2 changes: 1 addition & 1 deletion libs/community/langchain_google_community/gmail/toolkit.py
@@ -14,7 +14,7 @@

if TYPE_CHECKING:
# This is for linting and IDE typehints
- from googleapiclient.discovery import Resource
+ from googleapiclient.discovery import Resource # type: ignore[import-untyped]
else:
try:
# We do this so pydantic can resolve the types when instantiating
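The toolkit keeps the comment "We do this so pydantic can resolve the types when instantiating": the type checker sees the `TYPE_CHECKING` branch, while the `else` branch attempts the real import at runtime so pydantic can resolve the `Resource` forward reference. The diff only shows the first lines of that block, so the fallback below is an assumption about how such a pattern is usually completed, not the file's actual code:

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Static analysis path: mypy sees the real class but ignores the untyped package.
    from googleapiclient.discovery import Resource  # type: ignore[import-untyped]
else:
    try:
        # Runtime path: import the class so pydantic can resolve the
        # "Resource" annotation when the toolkit model is instantiated.
        from googleapiclient.discovery import Resource
    except ImportError:
        # Assumed fallback: without the optional dependency the name is simply
        # left undefined and models referencing it cannot be built.
        pass
```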
@@ -22,7 +22,7 @@
from langchain_core.utils import get_from_dict_or_env

if TYPE_CHECKING:
- from google.cloud.discoveryengine_v1beta import (
+ from google.cloud.discoveryengine_v1beta import ( # type: ignore[import-untyped]
ConversationalSearchServiceClient,
SearchRequest,
SearchResult,