changed numpy import #77

Merged: 4 commits, Mar 20, 2024
Changes from all commits
3 changes: 3 additions & 0 deletions .github/scripts/check_diff.py
@@ -5,6 +5,7 @@
 LANGCHAIN_DIRS = {
     "libs/genai",
     "libs/vertexai",
+    "libs/community"
 }

 if __name__ == "__main__":
@@ -30,6 +31,8 @@
             dirs_to_run.update({"libs/genai"})
         elif "libs/vertexai" in file:
             dirs_to_run.update({"libs/vertexai"})
+        elif "libs/community" in file:
+            dirs_to_run.update({"libs/community"})
         else:
             pass
     json_output = json.dumps(list(dirs_to_run))
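For context, check_diff.py maps the list of changed file paths to the library directories whose CI jobs should run, and the hunks above simply register libs/community alongside the existing libraries. A rough, self-contained sketch of how such a script fits together is shown below; everything outside the hunks above (the argument handling, the first branch, the final print) is an assumption for illustration, not the repository's exact code.

import json
import sys

LANGCHAIN_DIRS = {
    "libs/genai",
    "libs/vertexai",
    "libs/community",
}

if __name__ == "__main__":
    # Changed file paths are assumed to arrive as CLI arguments (assumption).
    files = sys.argv[1:]
    dirs_to_run: set = set()

    for file in files:
        if file.startswith(".github/"):
            # Changes to shared CI config plausibly trigger every library (assumption).
            dirs_to_run.update(LANGCHAIN_DIRS)
        elif "libs/genai" in file:
            dirs_to_run.update({"libs/genai"})
        elif "libs/vertexai" in file:
            dirs_to_run.update({"libs/vertexai"})
        elif "libs/community" in file:
            dirs_to_run.update({"libs/community"})
        else:
            pass

    # The workflow consumes this JSON list as its job matrix.
    json_output = json.dumps(list(dirs_to_run))
    print(json_output)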
1 change: 1 addition & 0 deletions .github/workflows/_all_ci.yml
@@ -17,6 +17,7 @@ on:
         options:
           - libs/genai
           - libs/vertexai
+          - libs/community


 # If another push to the same PR or branch happens while this workflow is still running,
8 changes: 4 additions & 4 deletions libs/community/Makefile
@@ -9,7 +9,7 @@ TEST_FILE ?= tests/unit_tests/
 integration_test integration_tests: TEST_FILE = tests/integration_tests/

 test tests integration_test integration_tests:
-    poetry run pytest --release $(TEST_FILE)
+    poetry run pytest $(TEST_FILE)


 ######################
@@ -20,8 +20,8 @@ test tests integration_test integration_tests:
 PYTHON_FILES=.
 MYPY_CACHE=.mypy_cache
 lint format: PYTHON_FILES=.
-lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/tools --name-only --diff-filter=d main | grep -E '\.py$$|\.ipynb$$')
-lint_package: PYTHON_FILES=langchain_google_tools
+lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/community --name-only --diff-filter=d main | grep -E '\.py$$|\.ipynb$$')
+lint_package: PYTHON_FILES=langchain_google_community
 lint_tests: PYTHON_FILES=tests
 lint_tests: MYPY_CACHE=.mypy_cache_test

@@ -41,7 +41,7 @@ spell_check:
 spell_fix:
     poetry run codespell --toml pyproject.toml -w

-check_imports: $(shell find langchain_google_tools -name '*.py')
+check_imports: $(shell find langchain_google_community -name '*.py')
     poetry run python ./scripts/check_imports.py $^

 ######################
@@ -88,7 +88,7 @@ def __init__(
                 to use. Defaults to None.
         """
         try:
-            from google.cloud import bigquery
+            from google.cloud import bigquery  # type: ignore[attr-defined]

             client_info = get_client_info(module="bigquery-vector-search")
             self.bq_client = bigquery.Client(
@@ -126,7 +126,7 @@ def __init__(

     def _initialize_table(self) -> Any:
         """Validates or creates the BigQuery table."""
-        from google.cloud import bigquery
+        from google.cloud import bigquery  # type: ignore[attr-defined]

         table_ref = bigquery.TableReference.from_string(self._full_table_id)
         table = self.bq_client.create_table(table_ref, exists_ok=True)
@@ -192,7 +192,7 @@ def _initialize_vector_index(self) -> Any:
         A vector index in BigQuery table enables efficient
         approximate vector search.
         """
-        from google.cloud import bigquery
+        from google.cloud import bigquery  # type: ignore[attr-defined]

         if self._have_index or self._creating_index:
             # Already have an index or in the process of creating one.
@@ -264,7 +264,7 @@ def _create_index(self):  # type: ignore[no-untyped-def]

     def _persist(self, data: Dict[str, Any]) -> None:
         """Saves documents and embeddings to BigQuery."""
-        from google.cloud import bigquery
+        from google.cloud import bigquery  # type: ignore[attr-defined]

         data_len = len(data[list(data.keys())[0]])
         if data_len == 0:
@@ -357,7 +357,7 @@ def get_documents(
             List of ids from adding the texts into the vectorstore.
         """
         if ids and len(ids) > 0:
-            from google.cloud import bigquery
+            from google.cloud import bigquery  # type: ignore[attr-defined]

             job_config = bigquery.QueryJobConfig(
                 query_parameters=[
@@ -424,7 +424,7 @@ def delete(self, ids: Optional[List[str]] = None, **kwargs: Any) -> Optional[bool]:
         """
         if not ids or len(ids) == 0:
             return True
-        from google.cloud import bigquery
+        from google.cloud import bigquery  # type: ignore[attr-defined]

         job_config = bigquery.QueryJobConfig(
             query_parameters=[
@@ -465,7 +465,7 @@ def _search_with_score_and_embeddings_by_vector(
         brute_force: bool = False,
         fraction_lists_to_search: Optional[float] = None,
     ) -> List[Tuple[Document, List[float], float]]:
-        from google.cloud import bigquery
+        from google.cloud import bigquery  # type: ignore[attr-defined]

         # Create an index if no index exists.
         if not self._have_index and not self._creating_index:
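All of the hunks in this file touch the same pattern: google-cloud-bigquery is imported lazily inside each method so it can remain an optional dependency, and the added # type: ignore[attr-defined] silences mypy, which cannot resolve the bigquery attribute on the google.cloud namespace package when the client library's stubs are not installed. A minimal sketch of that lazy-import pattern, with an illustrative helper name and error message rather than the module's actual code:

from typing import Any


def _get_bigquery_client(project_id: str) -> Any:
    """Illustrative helper: import BigQuery lazily and fail with an install hint."""
    try:
        # Lazy import keeps google-cloud-bigquery optional at install time;
        # the ignore comment silences mypy when no stubs are available.
        from google.cloud import bigquery  # type: ignore[attr-defined]
    except ImportError as exc:
        raise ImportError(
            "Could not import google-cloud-bigquery. "
            "Install it with `pip install google-cloud-bigquery`."
        ) from exc
    return bigquery.Client(project=project_id)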
@@ -9,12 +9,12 @@
 from langchain_core.utils import get_from_dict_or_env

 if TYPE_CHECKING:
-    from google.cloud.contentwarehouse_v1 import (
+    from google.cloud.contentwarehouse_v1 import (  # type: ignore[import]
         DocumentServiceClient,
         RequestMetadata,
         SearchDocumentsRequest,
     )
-    from google.cloud.contentwarehouse_v1.services.document_service.pagers import (
+    from google.cloud.contentwarehouse_v1.services.document_service.pagers import (  # type: ignore[import]
         SearchDocumentsPager,
     )

2 changes: 1 addition & 1 deletion libs/community/langchain_google_community/gmail/loader.py
@@ -2,7 +2,7 @@
 import re
 from typing import Any, Iterator

-from googleapiclient.discovery import build
+from googleapiclient.discovery import build  # type: ignore[import]
 from langchain_community.chat_loaders.base import BaseChatLoader
 from langchain_core.chat_sessions import ChatSession
 from langchain_core.messages import HumanMessage
2 changes: 1 addition & 1 deletion libs/community/langchain_google_community/gmail/toolkit.py
@@ -14,7 +14,7 @@

 if TYPE_CHECKING:
     # This is for linting and IDE typehints
-    from googleapiclient.discovery import Resource
+    from googleapiclient.discovery import Resource  # type: ignore[import]
 else:
     try:
         # We do this so pydantic can resolve the types when instantiating
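The toolkit uses the common TYPE_CHECKING split: type checkers and IDEs see the real googleapiclient class, while the runtime branch (truncated above) imports it defensively so pydantic can still resolve the annotation when the optional dependency is missing. A hedged sketch of that pattern follows; the except branch is an assumption, since the toolkit's actual fallback is not shown here.

from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    # Static analysis sees the real class.
    from googleapiclient.discovery import Resource  # type: ignore[import]
else:
    try:
        # At runtime, import only if google-api-python-client is installed,
        # so pydantic can resolve the forward reference in field annotations.
        from googleapiclient.discovery import Resource
    except ImportError:
        # Fallback keeps annotations resolvable without the package (assumption).
        Resource = Any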
@@ -22,7 +22,7 @@
 from langchain_core.utils import get_from_dict_or_env

 if TYPE_CHECKING:
-    from google.cloud.discoveryengine_v1beta import (
+    from google.cloud.discoveryengine_v1beta import (  # type: ignore[import]
         ConversationalSearchServiceClient,
         SearchRequest,
         SearchResult,