From 2ddae3b44d76625f1f7a95653ca161ad94b0287d Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Fri, 6 Sep 2024 10:50:35 -0400
Subject: [PATCH 01/22] delete check_pydantic script

---
 libs/genai/scripts/check_pydantic.sh | 27 ---------------------------
 1 file changed, 27 deletions(-)
 delete mode 100755 libs/genai/scripts/check_pydantic.sh

diff --git a/libs/genai/scripts/check_pydantic.sh b/libs/genai/scripts/check_pydantic.sh
deleted file mode 100755
index 06b5bb81..00000000
--- a/libs/genai/scripts/check_pydantic.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-#
-# This script searches for lines starting with "import pydantic" or "from pydantic"
-# in tracked files within a Git repository.
-#
-# Usage: ./scripts/check_pydantic.sh /path/to/repository
-
-# Check if a path argument is provided
-if [ $# -ne 1 ]; then
-  echo "Usage: $0 /path/to/repository"
-  exit 1
-fi
-
-repository_path="$1"
-
-# Search for lines matching the pattern within the specified repository
-result=$(git -C "$repository_path" grep -E '^import pydantic|^from pydantic')
-
-# Check if any matching lines were found
-if [ -n "$result" ]; then
-  echo "ERROR: The following lines need to be updated:"
-  echo "$result"
-  echo "Please replace the code with an import from langchain_core.pydantic_v1."
-  echo "For example, replace 'from pydantic import BaseModel'"
-  echo "with 'from langchain_core.pydantic_v1 import BaseModel'"
-  exit 1
-fi

From d6dc8beb105737b02d2ba31725febaba8ab74f2f Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Fri, 6 Sep 2024 10:55:39 -0400
Subject: [PATCH 02/22] update dependencies

---
 libs/genai/poetry.lock    | 19 ++++++++++---------
 libs/genai/pyproject.toml | 11 ++++++-----
 2 files changed, 16 insertions(+), 14 deletions(-)

diff --git a/libs/genai/poetry.lock b/libs/genai/poetry.lock
index 24b56466..1cc03631 100644
--- a/libs/genai/poetry.lock
+++ b/libs/genai/poetry.lock
@@ -547,10 +547,10 @@ files = [

 [[package]]
 name = "langchain-core"
-version = "0.2.33"
+version = "0.3.0.dev2"
 description = "Building applications with LLMs through composability"
 optional = false
-python-versions = ">=3.8.1,<4.0"
+python-versions = ">=3.9,<4.0"
 files = []
 develop = false

@@ -569,8 +569,8 @@ typing-extensions = ">=4.7"
 [package.source]
 type = "git"
 url = "https://github.com/langchain-ai/langchain.git"
-reference = "HEAD"
-resolved_reference = "c1bd4e05bcd180a36ba7b5d89ca2b5ce4f1e5d4a"
+reference = "v0.3/dev_releases"
+resolved_reference = "70f9d5a1f36114834aa6ae80b66ee21dd302b2a4"
 subdirectory = "libs/core"

 [[package]]
@@ -578,20 +578,21 @@ name = "langchain-standard-tests"
 version = "0.1.1"
 description = "Standard tests for LangChain implementations"
 optional = false
-python-versions = ">=3.8.1,<4.0"
+python-versions = ">=3.9,<4.0"
 files = []
 develop = false

 [package.dependencies]
 httpx = "^0.27.0"
-langchain-core = ">=0.1.40,<0.3"
+langchain-core = ">=0.3.0.dev1"
 pytest = ">=7,<9"
+syrupy = "^4"

 [package.source]
 type = "git"
 url = "https://github.com/langchain-ai/langchain.git"
-reference = "HEAD"
-resolved_reference = "b83f1eb0d56dbf99605a1fce93953ee09d77aabf"
+reference = "v0.3/dev_releases"
+resolved_reference = "70f9d5a1f36114834aa6ae80b66ee21dd302b2a4"
 subdirectory = "libs/standard-tests"

 [[package]]
@@ -1537,4 +1538,4 @@ images = ["pillow"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<4.0"
-content-hash = "6cdb00c9e430e592cea5d0bd158053ceb9e7e64734345fbb75cac364a8ee30c3"
+content-hash = "dbd21956a77f5416a98b8be28da9feccd8fe83481bae76bb1a32e5defe388ba9"
diff --git a/libs/genai/pyproject.toml b/libs/genai/pyproject.toml
index 300662dc..5257dc7c 100644
--- a/libs/genai/pyproject.toml
+++ b/libs/genai/pyproject.toml
@@ -12,9 +12,10 @@ license = "MIT"

 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"
-langchain-core = ">=0.2.33,<0.3"
+langchain-core = { version = "^0.3.0.dev2", allow-prereleases = true }
 google-generativeai = "^0.7.0"
 pillow = { version = "^10.1.0", optional = true }
+pydantic = ">=2,<3"

 [tool.poetry.extras]
 images = ["pillow"]
@@ -30,8 +31,8 @@ syrupy = "^4.0.2"
 pytest-watcher = "^0.3.4"
 pytest-asyncio = "^0.21.1"
 numpy = "^1.26.2"
-langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core" }
-langchain-standard-tests = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/standard-tests" }
+langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core", branch = "v0.3/dev_releases" }
+langchain-standard-tests = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/standard-tests", branch = "v0.3/dev_releases" }

 [tool.codespell]
 ignore-words-list = "rouge"
@@ -61,7 +62,7 @@ types-requests = "^2.28.11.5"
 types-google-cloud-ndb = "^2.2.0.1"
 types-pillow = "^10.1.0.2"
 types-protobuf = "^4.24.0.20240302"
-langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core" }
+langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core", branch = "v0.3/dev_releases" }
 numpy = "^1.26.2"

 [tool.poetry.group.dev]
@@ -72,7 +73,7 @@ pillow = "^10.1.0"
 types-requests = "^2.31.0.10"
 types-pillow = "^10.1.0.2"
 types-google-cloud-ndb = "^2.2.0.1"
-langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core" }
+langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core", branch = "v0.3/dev_releases" }

 [tool.ruff.lint]
 select = [

From cb7eb3359111a10bc7942cafb28fe01c308351f2 Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Fri, 6 Sep 2024 10:56:28 -0400
Subject: [PATCH 03/22] to_pydantic_2

---
 .../langchain_google_genai/_function_utils.py |  2 +-
 .../_genai_extension.py                       | 23 +++----------------
 .../langchain_google_genai/chat_models.py     |  7 +++---
 .../langchain_google_genai/embeddings.py      |  2 +-
 .../genai/langchain_google_genai/genai_aqa.py |  2 +-
 .../google_vector_store.py                    |  2 +-
 libs/genai/langchain_google_genai/llms.py     |  2 +-
 .../integration_tests/test_chat_models.py     |  2 +-
 .../integration_tests/test_embeddings.py      |  2 +-
 .../integration_tests/test_function_call.py   |  2 +-
 .../tests/unit_tests/test_chat_models.py      |  2 +-
 .../genai/tests/unit_tests/test_embeddings.py |  2 +-
 .../tests/unit_tests/test_function_utils.py   |  2 +-
 13 files changed, 18 insertions(+), 34 deletions(-)

diff --git a/libs/genai/langchain_google_genai/_function_utils.py b/libs/genai/langchain_google_genai/_function_utils.py
index 3b0a9259..91f720fe 100644
--- a/libs/genai/langchain_google_genai/_function_utils.py
+++ b/libs/genai/langchain_google_genai/_function_utils.py
@@ -22,7 +22,7 @@
 import google.ai.generativelanguage_v1beta.types as gapic
 import proto  # type: ignore[import]
 from google.generativeai.types.content_types import ToolDict  # type: ignore[import]
-from langchain_core.pydantic_v1 import BaseModel
+from pydantic import BaseModel
 from langchain_core.tools import BaseTool
 from langchain_core.tools import tool as callable_as_lc_tool
 from langchain_core.utils.function_calling import (
diff --git a/libs/genai/langchain_google_genai/_genai_extension.py b/libs/genai/langchain_google_genai/_genai_extension.py
index 071e481b..ba3e7eed 100644
--- a/libs/genai/langchain_google_genai/_genai_extension.py
+++ b/libs/genai/langchain_google_genai/_genai_extension.py
@@ -23,6 +23,8 @@
 from google.api_core import gapic_v1
 from google.auth import credentials, exceptions  # type: ignore
 from google.protobuf import timestamp_pb2
+from pydantic import ConfigDict
+
 _logger = logging.getLogger(__name__)

 _DEFAULT_API_ENDPOINT = "generativelanguage.googleapis.com"
@@ -131,26 +133,7 @@ def from_document(cls, d: genai.Document) -> "Document":

 @dataclass
-class Config:
-    """Global configuration for Google Generative AI API.
-
-    Normally, the defaults should work fine. Use this to pass Google Auth credentials
-    such as using a service account. Refer to for auth credentials documentation:
-    https://developers.google.com/identity/protocols/oauth2/service-account#creatinganaccount.
-
-    Attributes:
-        api_endpoint: The Google Generative API endpoint address.
-        user_agent: The user agent to use for logging.
-        page_size: For paging RPCs, how many entities to return per RPC.
-        testing: Are the unit tests running?
-        auth_credentials: For setting credentials such as using service accounts.
-    """
-
-    api_endpoint: str = _DEFAULT_API_ENDPOINT
-    user_agent: str = _USER_AGENT
-    page_size: int = _DEFAULT_PAGE_SIZE
-    testing: bool = False
-    auth_credentials: Optional[credentials.Credentials] = None
+model_config = ConfigDict(api_endpoint=_DEFAULT_API_ENDPOINT,user_agent=_USER_AGENT,page_size=_DEFAULT_PAGE_SIZE,testing=False,auth_credentials=None,)


 def set_config(config: Config) -> None:
diff --git a/libs/genai/langchain_google_genai/chat_models.py b/libs/genai/langchain_google_genai/chat_models.py
index 41a5b7d4..0d851306 100644
--- a/libs/genai/langchain_google_genai/chat_models.py
+++ b/libs/genai/langchain_google_genai/chat_models.py
@@ -75,7 +75,7 @@
     parse_tool_calls,
 )
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
-from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
+from pydantic import BaseModel, Field, SecretStr, root_validator
 from langchain_core.runnables import Runnable, RunnablePassthrough
 from langchain_core.utils import secret_from_env
 from langchain_core.utils.pydantic import is_basemodel_subclass
@@ -103,6 +103,8 @@
 from langchain_google_genai.llms import _BaseGoogleGenerativeAI

 from . import _genai_extension as genaix
+from pydantic import ConfigDict
+
 IMAGE_TYPES: Tuple = ()
 try:
@@ -832,8 +834,7 @@ class Joke(BaseModel):
     Gemini does not support system messages; any unsupported messages will
     raise an error."""

-    class Config:
-        allow_population_by_field_name = True
+    model_config = ConfigDict(populate_by_name=True,)

     @property
     def lc_secrets(self) -> Dict[str, str]:
diff --git a/libs/genai/langchain_google_genai/embeddings.py b/libs/genai/langchain_google_genai/embeddings.py
index 1dc2fc57..69c51706 100644
--- a/libs/genai/langchain_google_genai/embeddings.py
+++ b/libs/genai/langchain_google_genai/embeddings.py
@@ -8,7 +8,7 @@
     EmbedContentRequest,
 )
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
+from pydantic import BaseModel, Field, SecretStr, root_validator
 from langchain_core.utils import secret_from_env

 from langchain_google_genai._common import (
diff --git a/libs/genai/langchain_google_genai/genai_aqa.py b/libs/genai/langchain_google_genai/genai_aqa.py
index c339f4d0..71294f68 100644
--- a/libs/genai/langchain_google_genai/genai_aqa.py
+++ b/libs/genai/langchain_google_genai/genai_aqa.py
@@ -9,7 +9,7 @@
 from typing import Any, List, Optional

 import google.ai.generativelanguage as genai
-from langchain_core.pydantic_v1 import BaseModel, PrivateAttr
+from pydantic import BaseModel, PrivateAttr
 from langchain_core.runnables import RunnableSerializable
 from langchain_core.runnables.config import RunnableConfig
diff --git a/libs/genai/langchain_google_genai/google_vector_store.py b/libs/genai/langchain_google_genai/google_vector_store.py
index 79c75d15..6e569470 100644
--- a/libs/genai/langchain_google_genai/google_vector_store.py
+++ b/libs/genai/langchain_google_genai/google_vector_store.py
@@ -21,7 +21,7 @@
 import google.ai.generativelanguage as genai
 from langchain_core.documents import Document
 from langchain_core.embeddings import Embeddings
-from langchain_core.pydantic_v1 import BaseModel, PrivateAttr
+from pydantic import BaseModel, PrivateAttr
 from langchain_core.runnables import Runnable, RunnableLambda, RunnablePassthrough
 from langchain_core.vectorstores import VectorStore
diff --git a/libs/genai/langchain_google_genai/llms.py b/libs/genai/langchain_google_genai/llms.py
index ffad41ca..9c0c410b 100644
--- a/libs/genai/langchain_google_genai/llms.py
+++ b/libs/genai/langchain_google_genai/llms.py
@@ -12,7 +12,7 @@
 from langchain_core.language_models import LangSmithParams, LanguageModelInput
 from langchain_core.language_models.llms import BaseLLM, create_base_retry_decorator
 from langchain_core.outputs import Generation, GenerationChunk, LLMResult
-from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
+from pydantic import BaseModel, Field, SecretStr, root_validator
 from langchain_core.utils import secret_from_env

 from langchain_google_genai._enums import (
diff --git a/libs/genai/tests/integration_tests/test_chat_models.py b/libs/genai/tests/integration_tests/test_chat_models.py
index fc388ac0..f79ee0ab 100644
--- a/libs/genai/tests/integration_tests/test_chat_models.py
+++ b/libs/genai/tests/integration_tests/test_chat_models.py
@@ -15,7 +15,7 @@
     SystemMessage,
     ToolMessage,
 )
-from langchain_core.pydantic_v1 import BaseModel
+from pydantic import BaseModel
 from langchain_core.tools import tool
 from langchain_standard_tests.integration_tests import ChatModelIntegrationTests
diff --git a/libs/genai/tests/integration_tests/test_embeddings.py b/libs/genai/tests/integration_tests/test_embeddings.py
index 1412ab18..07fe2195 100644
--- a/libs/genai/tests/integration_tests/test_embeddings.py
+++ b/libs/genai/tests/integration_tests/test_embeddings.py
@@ -1,6 +1,6 @@
 import numpy as np
 import pytest
-from langchain_core.pydantic_v1 import SecretStr
+from pydantic import SecretStr

 from langchain_google_genai._common import GoogleGenerativeAIError
 from langchain_google_genai.embeddings import GoogleGenerativeAIEmbeddings
diff --git a/libs/genai/tests/integration_tests/test_function_call.py b/libs/genai/tests/integration_tests/test_function_call.py
index 57d0005a..0014dd8d 100644
--- a/libs/genai/tests/integration_tests/test_function_call.py
+++ b/libs/genai/tests/integration_tests/test_function_call.py
@@ -3,7 +3,7 @@
 import json

 from langchain_core.messages import AIMessage
-from langchain_core.pydantic_v1 import BaseModel
+from pydantic import BaseModel
 from langchain_core.tools import tool

 from langchain_google_genai.chat_models import (
diff --git a/libs/genai/tests/unit_tests/test_chat_models.py b/libs/genai/tests/unit_tests/test_chat_models.py
index 7c398970..1113a31a 100644
--- a/libs/genai/tests/unit_tests/test_chat_models.py
+++ b/libs/genai/tests/unit_tests/test_chat_models.py
@@ -24,7 +24,7 @@
     ToolMessage,
 )
 from langchain_core.messages.tool import tool_call as create_tool_call
-from langchain_core.pydantic_v1 import SecretStr
+from pydantic import SecretStr
 from langchain_standard_tests.unit_tests import ChatModelUnitTests
 from pytest import CaptureFixture
diff --git a/libs/genai/tests/unit_tests/test_embeddings.py b/libs/genai/tests/unit_tests/test_embeddings.py
index ef901bd9..1b0ebb6e 100644
--- a/libs/genai/tests/unit_tests/test_embeddings.py
+++ b/libs/genai/tests/unit_tests/test_embeddings.py
@@ -8,7 +8,7 @@
     ContentEmbedding,
     EmbedContentRequest,
 )
-from langchain_core.pydantic_v1 import SecretStr
+from pydantic import SecretStr
 from pytest import CaptureFixture

 from langchain_google_genai.embeddings import GoogleGenerativeAIEmbeddings
diff --git a/libs/genai/tests/unit_tests/test_function_utils.py b/libs/genai/tests/unit_tests/test_function_utils.py
index 786ad9f5..03c1111a 100644
--- a/libs/genai/tests/unit_tests/test_function_utils.py
+++ b/libs/genai/tests/unit_tests/test_function_utils.py
@@ -2,7 +2,7 @@
 import google.ai.generativelanguage as glm
 import pytest
-from langchain_core.pydantic_v1 import BaseModel
+from pydantic import BaseModel
 from langchain_core.tools import tool

 from langchain_google_genai._function_utils import (

From 23e32829ececa16d6e5f834ce7a2b451d116af1f Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Fri, 6 Sep 2024 10:57:26 -0400
Subject: [PATCH 04/22] model_after_rewrite

---
 .../langchain_google_genai/chat_models.py | 44 +++++++++----------
 .../langchain_google_genai/embeddings.py  | 16 +++----
 libs/genai/langchain_google_genai/llms.py | 40 ++++++++---------
 3 files changed, 50 insertions(+), 50 deletions(-)

diff --git a/libs/genai/langchain_google_genai/chat_models.py b/libs/genai/langchain_google_genai/chat_models.py
index 0d851306..b682d6d7 100644
--- a/libs/genai/langchain_google_genai/chat_models.py
+++ b/libs/genai/langchain_google_genai/chat_models.py
@@ -75,7 +75,7 @@
     parse_tool_calls,
 )
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
-from pydantic import BaseModel, Field, SecretStr, root_validator
+from pydantic import BaseModel, Field, SecretStr, root_validator, model_validator
 from langchain_core.runnables import Runnable, RunnablePassthrough
 from langchain_core.utils import secret_from_env
 from langchain_core.utils.pydantic import is_basemodel_subclass
@@ -848,38 +848,38 @@ def _llm_type(self) -> str:
     def is_lc_serializable(self) -> bool:
         return True

-    @root_validator(pre=False, skip_on_failure=True)
-    def validate_environment(cls, values: Dict) -> Dict:
+    @model_validator(mode="after")
+    def validate_environment(self) -> Self:
         """Validates params and passes them to google-generativeai package."""
         if (
-            values.get("temperature") is not None
-            and not 0 <= values["temperature"] <= 1
+            (self.temperature or None) is not None
+            and not 0 <= self.temperature <= 1
         ):
             raise ValueError("temperature must be in the range [0.0, 1.0]")

-        if values.get("top_p") is not None and not 0 <= values["top_p"] <= 1:
+        if (self.top_p or None) is not None and not 0 <= self.top_p <= 1:
             raise ValueError("top_p must be in the range [0.0, 1.0]")

-        if values.get("top_k") is not None and values["top_k"] <= 0:
+        if (self.top_k or None) is not None and self.top_k <= 0:
             raise ValueError("top_k must be positive")

-        if not values["model"].startswith("models/"):
-            values["model"] = f"models/{values['model']}"
+        if not self.model.startswith("models/"):
+            self.model = f"models/{self.model}"

-        additional_headers = values.get("additional_headers") or {}
-        values["default_metadata"] = tuple(additional_headers.items())
+        additional_headers = (self.additional_headers or None) or {}
+        self.default_metadata = tuple(additional_headers.items())
         client_info = get_client_info("ChatGoogleGenerativeAI")
         google_api_key = None
-        if not values.get("credentials"):
-            google_api_key = values.get("google_api_key")
+        if not (self.credentials or None):
+            google_api_key = (self.google_api_key or None)
             if isinstance(google_api_key, SecretStr):
                 google_api_key = google_api_key.get_secret_value()
-        transport: Optional[str] = values.get("transport")
-        values["client"] = genaix.build_generative_service(
-            credentials=values.get("credentials"),
+        transport: Optional[str] = (self.transport or None)
+        self.client = genaix.build_generative_service(
+            credentials=(self.credentials or None),
             api_key=google_api_key,
             client_info=client_info,
-            client_options=values.get("client_options"),
+            client_options=(self.client_options or None),
             transport=transport,
         )
@@ -889,17 +889,17 @@ def validate_environment(cls, values: Dict) -> Dict:
         # this check ensures that async client is only initialized
         # within an asyncio event loop to avoid the error
         if _is_event_loop_running():
-            values["async_client"] = genaix.build_generative_async_service(
-                credentials=values.get("credentials"),
+            self.async_client = genaix.build_generative_async_service(
+                credentials=(self.credentials or None),
                 api_key=google_api_key,
                 client_info=client_info,
-                client_options=values.get("client_options"),
+                client_options=(self.client_options or None),
                 transport=transport,
             )
         else:
-            values["async_client"] = None
+            self.async_client = None

-        return values
+        return self

     @property
     def _identifying_params(self) -> Dict[str, Any]:
diff --git a/libs/genai/langchain_google_genai/embeddings.py b/libs/genai/langchain_google_genai/embeddings.py
index 69c51706..4eb83756 100644
--- a/libs/genai/langchain_google_genai/embeddings.py
+++ b/libs/genai/langchain_google_genai/embeddings.py
@@ -8,7 +8,7 @@
     EmbedContentRequest,
 )
 from langchain_core.embeddings import Embeddings
-from pydantic import BaseModel, Field, SecretStr, root_validator
+from pydantic import BaseModel, Field, SecretStr, root_validator, model_validator
 from langchain_core.utils import secret_from_env

 from langchain_google_genai._common import (
@@ -82,21 +82,21 @@ class GoogleGenerativeAIEmbeddings(BaseModel, Embeddings):
         "Example: `{'timeout': 10}`",
     )

-    @root_validator(pre=False, skip_on_failure=True)
-    def validate_environment(cls, values: Dict) -> Dict:
+    @model_validator(mode="after")
+    def validate_environment(self) -> Self:
         """Validates params and passes them to google-generativeai package."""
-        google_api_key = values.get("google_api_key")
+        google_api_key = (self.google_api_key or None)
         if isinstance(google_api_key, SecretStr):
             google_api_key = google_api_key.get_secret_value()
         client_info = get_client_info("GoogleGenerativeAIEmbeddings")

-        values["client"] = build_generative_service(
-            credentials=values.get("credentials"),
+        self.client = build_generative_service(
+            credentials=(self.credentials or None),
             api_key=google_api_key,
             client_info=client_info,
-            client_options=values.get("client_options"),
+            client_options=(self.client_options or None),
         )
-        return values
+        return self

     @staticmethod
     def _split_by_punctuation(text: str) -> List[str]:
diff --git a/libs/genai/langchain_google_genai/llms.py b/libs/genai/langchain_google_genai/llms.py
index 9c0c410b..e2256d1f 100644
--- a/libs/genai/langchain_google_genai/llms.py
+++ b/libs/genai/langchain_google_genai/llms.py
@@ -12,7 +12,7 @@
 from langchain_core.language_models import LangSmithParams, LanguageModelInput
 from langchain_core.language_models.llms import BaseLLM, create_base_retry_decorator
 from langchain_core.outputs import Generation, GenerationChunk, LLMResult
-from pydantic import BaseModel, Field, SecretStr, root_validator
+from pydantic import BaseModel, Field, SecretStr, root_validator, model_validator
 from langchain_core.utils import secret_from_env

 from langchain_google_genai._enums import (
@@ -216,28 +216,28 @@ class GoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseLLM):

     client: Any = None  #: :meta private:

-    @root_validator(pre=False, skip_on_failure=True)
-    def validate_environment(cls, values: Dict) -> Dict:
+    @model_validator(mode="after")
+    def validate_environment(self) -> Self:
         """Validates params and passes them to google-generativeai package."""
-        if values.get("credentials"):
+        if (self.credentials or None):
             genai.configure(
-                credentials=values.get("credentials"),
-                transport=values.get("transport"),
-                client_options=values.get("client_options"),
+                credentials=(self.credentials or None),
+                transport=(self.transport or None),
+                client_options=(self.client_options or None),
             )
         else:
-            google_api_key = values.get("google_api_key")
+            google_api_key = (self.google_api_key or None)
             if isinstance(google_api_key, SecretStr):
                 google_api_key = google_api_key.get_secret_value()
             genai.configure(
                 api_key=google_api_key,
-                transport=values.get("transport"),
-                client_options=values.get("client_options"),
+                transport=(self.transport or None),
+                client_options=(self.client_options or None),
             )
-        model_name = values["model"]
+        model_name = self.model

-        safety_settings = values["safety_settings"]
+        safety_settings = self.safety_settings

         if safety_settings and (
             not GoogleModelFamily(model_name) == GoogleModelFamily.GEMINI
@@ -245,28 +245,28 @@ def validate_environment(cls, values: Dict) -> Dict:
             raise ValueError("Safety settings are only supported for Gemini models")

         if GoogleModelFamily(model_name) == GoogleModelFamily.GEMINI:
-            values["client"] = genai.GenerativeModel(
+            self.client = genai.GenerativeModel(
                 model_name=model_name, safety_settings=safety_settings
             )
         else:
-            values["client"] = genai
+            self.client = genai

-        if values["temperature"] is not None and not 0 <= values["temperature"] <= 1:
+        if self.temperature is not None and not 0 <= self.temperature <= 1:
             raise ValueError("temperature must be in the range [0.0, 1.0]")

-        if values["top_p"] is not None and not 0 <= values["top_p"] <= 1:
+        if self.top_p is not None and not 0 <= self.top_p <= 1:
             raise ValueError("top_p must be in the range [0.0, 1.0]")

-        if values["top_k"] is not None and values["top_k"] <= 0:
+        if self.top_k is not None and self.top_k <= 0:
             raise ValueError("top_k must be positive")

-        if values["max_output_tokens"] is not None and values["max_output_tokens"] <= 0:
+        if self.max_output_tokens is not None and self.max_output_tokens <= 0:
             raise ValueError("max_output_tokens must be greater than zero")

-        if values["timeout"] is not None and values["timeout"] <= 0:
+        if self.timeout is not None and self.timeout <= 0:
             raise ValueError("timeout must be greater than zero")

-        return values
+        return self

     def _get_ls_params(
         self, stop: Optional[List[str]] = None, **kwargs: Any

From d088c0aad8077b707808ffe33b339293a7c19d3f Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Fri, 6 Sep 2024 10:57:36 -0400
Subject: [PATCH 05/22] Self

---
 libs/genai/langchain_google_genai/chat_models.py | 2 ++
 libs/genai/langchain_google_genai/embeddings.py  | 2 ++
 libs/genai/langchain_google_genai/llms.py        | 2 ++
 3 files changed, 6 insertions(+)

diff --git a/libs/genai/langchain_google_genai/chat_models.py b/libs/genai/langchain_google_genai/chat_models.py
index b682d6d7..a1dc7b9e 100644
--- a/libs/genai/langchain_google_genai/chat_models.py
+++ b/libs/genai/langchain_google_genai/chat_models.py
@@ -104,6 +104,8 @@
 from . import _genai_extension as genaix
 from pydantic import ConfigDict
+from typing_extensions import Self
+

 IMAGE_TYPES: Tuple = ()
diff --git a/libs/genai/langchain_google_genai/embeddings.py b/libs/genai/langchain_google_genai/embeddings.py
index 4eb83756..6730a71f 100644
--- a/libs/genai/langchain_google_genai/embeddings.py
+++ b/libs/genai/langchain_google_genai/embeddings.py
@@ -16,6 +16,8 @@
     get_client_info,
 )
 from langchain_google_genai._genai_extension import build_generative_service
+from typing_extensions import Self
+
 _MAX_TOKENS_PER_BATCH = 20000
 _DEFAULT_BATCH_SIZE = 100
diff --git a/libs/genai/langchain_google_genai/llms.py b/libs/genai/langchain_google_genai/llms.py
index e2256d1f..537d5f39 100644
--- a/libs/genai/langchain_google_genai/llms.py
+++ b/libs/genai/langchain_google_genai/llms.py
@@ -19,6 +19,8 @@
     HarmBlockThreshold,
     HarmCategory,
 )
+from typing_extensions import Self
+

 class GoogleModelFamily(str, Enum):

From 06f295c7b3960d5923756f6ccb0fbea17247de53 Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Fri, 6 Sep 2024 10:58:36 -0400
Subject: [PATCH 06/22] revert change to _genai_extension

---
 .../_genai_extension.py | 23 ++++++++++++++++---
 1 file changed, 20 insertions(+), 3 deletions(-)

diff --git a/libs/genai/langchain_google_genai/_genai_extension.py b/libs/genai/langchain_google_genai/_genai_extension.py
index ba3e7eed..071e481b 100644
--- a/libs/genai/langchain_google_genai/_genai_extension.py
+++ b/libs/genai/langchain_google_genai/_genai_extension.py
@@ -23,8 +23,6 @@
 from google.api_core import gapic_v1
 from google.auth import credentials, exceptions  # type: ignore
 from google.protobuf import timestamp_pb2
-from pydantic import ConfigDict
-
 _logger = logging.getLogger(__name__)

 _DEFAULT_API_ENDPOINT = "generativelanguage.googleapis.com"
@@ -131,7 +131,26 @@ def from_document(cls, d: genai.Document) -> "Document":

 @dataclass
-model_config = ConfigDict(api_endpoint=_DEFAULT_API_ENDPOINT,user_agent=_USER_AGENT,page_size=_DEFAULT_PAGE_SIZE,testing=False,auth_credentials=None,)
+class Config:
+    """Global configuration for Google Generative AI API.
+
+    Normally, the defaults should work fine. Use this to pass Google Auth credentials
+    such as using a service account. Refer to for auth credentials documentation:
+    https://developers.google.com/identity/protocols/oauth2/service-account#creatinganaccount.
+
+    Attributes:
+        api_endpoint: The Google Generative API endpoint address.
+        user_agent: The user agent to use for logging.
+        page_size: For paging RPCs, how many entities to return per RPC.
+        testing: Are the unit tests running?
+        auth_credentials: For setting credentials such as using service accounts.
+    """
+
+    api_endpoint: str = _DEFAULT_API_ENDPOINT
+    user_agent: str = _USER_AGENT
+    page_size: int = _DEFAULT_PAGE_SIZE
+    testing: bool = False
+    auth_credentials: Optional[credentials.Credentials] = None


 def set_config(config: Config) -> None:

From 9a653b17c830b24de13e6ee2ac4622a3722b1e28 Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Fri, 6 Sep 2024 10:59:11 -0400
Subject: [PATCH 07/22] format

---
 .../langchain_google_genai/_function_utils.py |  2 +-
 .../langchain_google_genai/chat_models.py     | 27 ++++++++++---------
 .../langchain_google_genai/embeddings.py      |  7 +++--
 .../genai/langchain_google_genai/genai_aqa.py |  2 +-
 .../google_vector_store.py                    |  2 +-
 libs/genai/langchain_google_genai/llms.py     |  9 +++----
 .../integration_tests/test_chat_models.py     |  2 +-
 .../integration_tests/test_function_call.py   |  2 +-
 .../tests/unit_tests/test_chat_models.py      |  2 +-
 .../tests/unit_tests/test_function_utils.py   |  2 +-
 10 files changed, 29 insertions(+), 28 deletions(-)

diff --git a/libs/genai/langchain_google_genai/_function_utils.py b/libs/genai/langchain_google_genai/_function_utils.py
index 91f720fe..067c3e96 100644
--- a/libs/genai/langchain_google_genai/_function_utils.py
+++ b/libs/genai/langchain_google_genai/_function_utils.py
@@ -22,7 +22,6 @@
 import google.ai.generativelanguage_v1beta.types as gapic
 import proto  # type: ignore[import]
 from google.generativeai.types.content_types import ToolDict  # type: ignore[import]
-from pydantic import BaseModel
 from langchain_core.tools import BaseTool
 from langchain_core.tools import tool as callable_as_lc_tool
 from langchain_core.utils.function_calling import (
@@ -30,6 +29,7 @@
     convert_to_openai_tool,
 )
 from langchain_core.utils.json_schema import dereference_refs
+from pydantic import BaseModel

 logger = logging.getLogger(__name__)
diff --git a/libs/genai/langchain_google_genai/chat_models.py b/libs/genai/langchain_google_genai/chat_models.py
index a1dc7b9e..f09915d3 100644
--- a/libs/genai/langchain_google_genai/chat_models.py
+++ b/libs/genai/langchain_google_genai/chat_models.py
@@ -75,10 +75,17 @@
     parse_tool_calls,
 )
 from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult
-from pydantic import BaseModel, Field, SecretStr, root_validator, model_validator
 from langchain_core.runnables import Runnable, RunnablePassthrough
 from langchain_core.utils import secret_from_env
 from langchain_core.utils.pydantic import is_basemodel_subclass
+from pydantic import (
+    BaseModel,
+    ConfigDict,
+    Field,
+    SecretStr,
+    model_validator,
+    root_validator,
+)
 from tenacity import (
     before_sleep_log,
     retry,
@@ -86,6 +93,7 @@
     stop_after_attempt,
     wait_exponential,
 )
+from typing_extensions import Self

 from langchain_google_genai._common import (
     GoogleGenerativeAIError,
@@ -103,10 +111,6 @@
 from langchain_google_genai.llms import _BaseGoogleGenerativeAI

 from . import _genai_extension as genaix
-from pydantic import ConfigDict
-from typing_extensions import Self
-
-
 IMAGE_TYPES: Tuple = ()
 try:
@@ -836,7 +840,9 @@ class Joke(BaseModel):
     Gemini does not support system messages; any unsupported messages will
     raise an error."""

-    model_config = ConfigDict(populate_by_name=True,)
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )

     @property
     def lc_secrets(self) -> Dict[str, str]:
@@ -853,10 +859,7 @@ def is_lc_serializable(self) -> bool:
     @model_validator(mode="after")
     def validate_environment(self) -> Self:
         """Validates params and passes them to google-generativeai package."""
-        if (
-            (self.temperature or None) is not None
-            and not 0 <= self.temperature <= 1
-        ):
+        if (self.temperature or None) is not None and not 0 <= self.temperature <= 1:
             raise ValueError("temperature must be in the range [0.0, 1.0]")

         if (self.top_p or None) is not None and not 0 <= self.top_p <= 1:
@@ -873,10 +876,10 @@ def validate_environment(self) -> Self:
         client_info = get_client_info("ChatGoogleGenerativeAI")
         google_api_key = None
         if not (self.credentials or None):
-            google_api_key = (self.google_api_key or None)
+            google_api_key = self.google_api_key or None
             if isinstance(google_api_key, SecretStr):
                 google_api_key = google_api_key.get_secret_value()
-        transport: Optional[str] = (self.transport or None)
+        transport: Optional[str] = self.transport or None
         self.client = genaix.build_generative_service(
             credentials=(self.credentials or None),
             api_key=google_api_key,
diff --git a/libs/genai/langchain_google_genai/embeddings.py b/libs/genai/langchain_google_genai/embeddings.py
index 6730a71f..385c3743 100644
--- a/libs/genai/langchain_google_genai/embeddings.py
+++ b/libs/genai/langchain_google_genai/embeddings.py
@@ -8,16 +8,15 @@
     EmbedContentRequest,
 )
 from langchain_core.embeddings import Embeddings
-from pydantic import BaseModel, Field, SecretStr, root_validator, model_validator
 from langchain_core.utils import secret_from_env
+from pydantic import BaseModel, Field, SecretStr, model_validator, root_validator
+from typing_extensions import Self

 from langchain_google_genai._common import (
     GoogleGenerativeAIError,
     get_client_info,
 )
 from langchain_google_genai._genai_extension import build_generative_service
-from typing_extensions import Self
-
 _MAX_TOKENS_PER_BATCH = 20000
 _DEFAULT_BATCH_SIZE = 100
@@ -87,7 +86,7 @@ class GoogleGenerativeAIEmbeddings(BaseModel, Embeddings):
     @model_validator(mode="after")
     def validate_environment(self) -> Self:
         """Validates params and passes them to google-generativeai package."""
-        google_api_key = (self.google_api_key or None)
+        google_api_key = self.google_api_key or None
         if isinstance(google_api_key, SecretStr):
             google_api_key = google_api_key.get_secret_value()
         client_info = get_client_info("GoogleGenerativeAIEmbeddings")
diff --git a/libs/genai/langchain_google_genai/genai_aqa.py b/libs/genai/langchain_google_genai/genai_aqa.py
index 71294f68..9402f803 100644
--- a/libs/genai/langchain_google_genai/genai_aqa.py
+++ b/libs/genai/langchain_google_genai/genai_aqa.py
@@ -9,9 +9,9 @@
 from typing import Any, List, Optional

 import google.ai.generativelanguage as genai
-from pydantic import BaseModel, PrivateAttr
 from langchain_core.runnables import RunnableSerializable
 from langchain_core.runnables.config import RunnableConfig
+from pydantic import BaseModel, PrivateAttr

 from . import _genai_extension as genaix
diff --git a/libs/genai/langchain_google_genai/google_vector_store.py b/libs/genai/langchain_google_genai/google_vector_store.py
index 6e569470..84a415bb 100644
--- a/libs/genai/langchain_google_genai/google_vector_store.py
+++ b/libs/genai/langchain_google_genai/google_vector_store.py
@@ -21,9 +21,9 @@
 import google.ai.generativelanguage as genai
 from langchain_core.documents import Document
 from langchain_core.embeddings import Embeddings
-from pydantic import BaseModel, PrivateAttr
 from langchain_core.runnables import Runnable, RunnableLambda, RunnablePassthrough
 from langchain_core.vectorstores import VectorStore
+from pydantic import BaseModel, PrivateAttr

 from . import _genai_extension as genaix
 from .genai_aqa import (
diff --git a/libs/genai/langchain_google_genai/llms.py b/libs/genai/langchain_google_genai/llms.py
index 537d5f39..1f8544d2 100644
--- a/libs/genai/langchain_google_genai/llms.py
+++ b/libs/genai/langchain_google_genai/llms.py
@@ -12,15 +12,14 @@
 from langchain_core.language_models import LangSmithParams, LanguageModelInput
 from langchain_core.language_models.llms import BaseLLM, create_base_retry_decorator
 from langchain_core.outputs import Generation, GenerationChunk, LLMResult
-from pydantic import BaseModel, Field, SecretStr, root_validator, model_validator
 from langchain_core.utils import secret_from_env
+from pydantic import BaseModel, Field, SecretStr, model_validator, root_validator
+from typing_extensions import Self

 from langchain_google_genai._enums import (
     HarmBlockThreshold,
     HarmCategory,
 )
-from typing_extensions import Self
-

 class GoogleModelFamily(str, Enum):
@@ -221,14 +220,14 @@ class GoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseLLM):
     @model_validator(mode="after")
     def validate_environment(self) -> Self:
         """Validates params and passes them to google-generativeai package."""
-        if (self.credentials or None):
+        if self.credentials or None:
             genai.configure(
                 credentials=(self.credentials or None),
                 transport=(self.transport or None),
                 client_options=(self.client_options or None),
             )
         else:
-            google_api_key = (self.google_api_key or None)
+            google_api_key = self.google_api_key or None
             if isinstance(google_api_key, SecretStr):
                 google_api_key = google_api_key.get_secret_value()
             genai.configure(
diff --git a/libs/genai/tests/integration_tests/test_chat_models.py b/libs/genai/tests/integration_tests/test_chat_models.py
index f79ee0ab..ec5e8db7 100644
--- a/libs/genai/tests/integration_tests/test_chat_models.py
+++ b/libs/genai/tests/integration_tests/test_chat_models.py
@@ -15,9 +15,9 @@
     SystemMessage,
     ToolMessage,
 )
-from pydantic import BaseModel
 from langchain_core.tools import tool
 from langchain_standard_tests.integration_tests import ChatModelIntegrationTests
+from pydantic import BaseModel

 from langchain_google_genai import (
     ChatGoogleGenerativeAI,
diff --git a/libs/genai/tests/integration_tests/test_function_call.py b/libs/genai/tests/integration_tests/test_function_call.py
index 0014dd8d..27dbb283 100644
--- a/libs/genai/tests/integration_tests/test_function_call.py
+++ b/libs/genai/tests/integration_tests/test_function_call.py
@@ -3,7 +3,7 @@
 import json

 from langchain_core.messages import AIMessage
-from pydantic import BaseModel
 from langchain_core.tools import tool
+from pydantic import BaseModel

 from langchain_google_genai.chat_models import (
     ChatGoogleGenerativeAI,
diff --git a/libs/genai/tests/unit_tests/test_chat_models.py b/libs/genai/tests/unit_tests/test_chat_models.py
index 1113a31a..5c5066d1 100644
--- a/libs/genai/tests/unit_tests/test_chat_models.py
+++ b/libs/genai/tests/unit_tests/test_chat_models.py
@@ -24,8 +24,8 @@
     ToolMessage,
 )
 from langchain_core.messages.tool import tool_call as create_tool_call
-from pydantic import SecretStr
 from langchain_standard_tests.unit_tests import ChatModelUnitTests
+from pydantic import SecretStr
 from pytest import CaptureFixture

 from langchain_google_genai.chat_models import (
diff --git a/libs/genai/tests/unit_tests/test_function_utils.py b/libs/genai/tests/unit_tests/test_function_utils.py
index 03c1111a..3e8a5e88 100644
--- a/libs/genai/tests/unit_tests/test_function_utils.py
+++ b/libs/genai/tests/unit_tests/test_function_utils.py
@@ -2,8 +2,8 @@
 import google.ai.generativelanguage as glm
 import pytest
-from pydantic import BaseModel
 from langchain_core.tools import tool
+from pydantic import BaseModel

 from langchain_google_genai._function_utils import (
     _tool_choice_to_tool_config,

From e5dea34c356998b7f6c5bc6c514fd703a5b3d4c9 Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Fri, 6 Sep 2024 11:00:05 -0400
Subject: [PATCH 08/22] remove check_pydantic from MakeFile

---
 libs/genai/Makefile | 1 -
 1 file changed, 1 deletion(-)

diff --git a/libs/genai/Makefile b/libs/genai/Makefile
index 288310ec..c0729645 100644
--- a/libs/genai/Makefile
+++ b/libs/genai/Makefile
@@ -36,7 +36,6 @@ lint_tests: PYTHON_FILES=tests
 lint_tests: MYPY_CACHE=.mypy_cache_test

 lint lint_diff lint_package lint_tests:
-	./scripts/check_pydantic.sh .
 	./scripts/lint_imports.sh
 	poetry run ruff check .
 	poetry run ruff format $(PYTHON_FILES) --diff

From 2d631096daf9b1438b086f44d1f965fb85b2759d Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Fri, 6 Sep 2024 11:00:39 -0400
Subject: [PATCH 09/22] remove unused imports

---
 libs/genai/langchain_google_genai/chat_models.py | 1 -
 libs/genai/langchain_google_genai/embeddings.py  | 2 +-
 libs/genai/langchain_google_genai/llms.py        | 2 +-
 3 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/libs/genai/langchain_google_genai/chat_models.py b/libs/genai/langchain_google_genai/chat_models.py
index f09915d3..bfb4dd05 100644
--- a/libs/genai/langchain_google_genai/chat_models.py
+++ b/libs/genai/langchain_google_genai/chat_models.py
@@ -84,7 +84,6 @@
     Field,
     SecretStr,
     model_validator,
-    root_validator,
 )
 from tenacity import (
     before_sleep_log,
diff --git a/libs/genai/langchain_google_genai/embeddings.py b/libs/genai/langchain_google_genai/embeddings.py
index 385c3743..b97eff1d 100644
--- a/libs/genai/langchain_google_genai/embeddings.py
+++ b/libs/genai/langchain_google_genai/embeddings.py
@@ -9,7 +9,7 @@
 )
 from langchain_core.embeddings import Embeddings
 from langchain_core.utils import secret_from_env
-from pydantic import BaseModel, Field, SecretStr, model_validator, root_validator
+from pydantic import BaseModel, Field, SecretStr, model_validator
 from typing_extensions import Self

 from langchain_google_genai._common import (
diff --git a/libs/genai/langchain_google_genai/llms.py b/libs/genai/langchain_google_genai/llms.py
index 1f8544d2..5913bff2 100644
--- a/libs/genai/langchain_google_genai/llms.py
+++ b/libs/genai/langchain_google_genai/llms.py
@@ -13,7 +13,7 @@
 from langchain_core.language_models.llms import BaseLLM, create_base_retry_decorator
 from langchain_core.outputs import Generation, GenerationChunk, LLMResult
 from langchain_core.utils import secret_from_env
-from pydantic import BaseModel, Field, SecretStr, model_validator, root_validator
+from pydantic import BaseModel, Field, SecretStr, model_validator
 from typing_extensions import Self

 from langchain_google_genai._enums import (

From b6eafe6214b598bf84d521a9eb849f2278e21003 Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Fri, 6 Sep 2024 11:07:02 -0400
Subject: [PATCH 10/22] fix

---
 .../langchain_google_genai/chat_models.py | 22 +++++++++----------
 .../langchain_google_genai/embeddings.py  |  6 ++---
 libs/genai/langchain_google_genai/llms.py | 14 ++++++------
 3 files changed, 21 insertions(+), 21 deletions(-)

diff --git a/libs/genai/langchain_google_genai/chat_models.py b/libs/genai/langchain_google_genai/chat_models.py
index bfb4dd05..b78f2cee 100644
--- a/libs/genai/langchain_google_genai/chat_models.py
+++ b/libs/genai/langchain_google_genai/chat_models.py
@@ -858,32 +858,32 @@ def is_lc_serializable(self) -> bool:
     @model_validator(mode="after")
     def validate_environment(self) -> Self:
         """Validates params and passes them to google-generativeai package."""
-        if (self.temperature or None) is not None and not 0 <= self.temperature <= 1:
+        if self.temperature is not None and not 0 <= self.temperature <= 1:
             raise ValueError("temperature must be in the range [0.0, 1.0]")

-        if (self.top_p or None) is not None and not 0 <= self.top_p <= 1:
+        if self.top_p is not None and not 0 <= self.top_p <= 1:
             raise ValueError("top_p must be in the range [0.0, 1.0]")

-        if (self.top_k or None) is not None and self.top_k <= 0:
+        if self.top_k is not None and self.top_k <= 0:
             raise ValueError("top_k must be positive")

         if not self.model.startswith("models/"):
             self.model = f"models/{self.model}"

-        additional_headers = (self.additional_headers or None) or {}
+        additional_headers = self.additional_headers or {}
         self.default_metadata = tuple(additional_headers.items())
         client_info = get_client_info("ChatGoogleGenerativeAI")
         google_api_key = None
-        if not (self.credentials or None):
-            google_api_key = self.google_api_key or None
+        if not self.credentials:
+            google_api_key = self.google_api_key
             if isinstance(google_api_key, SecretStr):
                 google_api_key = google_api_key.get_secret_value()
-        transport: Optional[str] = self.transport or None
+        transport: Optional[str] = self.transport
         self.client = genaix.build_generative_service(
-            credentials=(self.credentials or None),
+            credentials=self.credentials,
             api_key=google_api_key,
             client_info=client_info,
-            client_options=(self.client_options or None),
+            client_options=self.client_options,
             transport=transport,
         )
@@ -894,10 +894,10 @@ def validate_environment(self) -> Self:
         # within an asyncio event loop to avoid the error
         if _is_event_loop_running():
             self.async_client = genaix.build_generative_async_service(
-                credentials=(self.credentials or None),
+                credentials=self.credentials,
                 api_key=google_api_key,
                 client_info=client_info,
-                client_options=(self.client_options or None),
+                client_options=self.client_options,
                 transport=transport,
             )
         else:
diff --git a/libs/genai/langchain_google_genai/embeddings.py b/libs/genai/langchain_google_genai/embeddings.py
index b97eff1d..3a2c15c9 100644
--- a/libs/genai/langchain_google_genai/embeddings.py
+++ b/libs/genai/langchain_google_genai/embeddings.py
@@ -86,16 +86,16 @@ class GoogleGenerativeAIEmbeddings(BaseModel, Embeddings):
     @model_validator(mode="after")
     def validate_environment(self) -> Self:
         """Validates params and passes them to google-generativeai package."""
-        google_api_key = self.google_api_key or None
+        google_api_key = self.google_api_key
         if isinstance(google_api_key, SecretStr):
             google_api_key = google_api_key.get_secret_value()
         client_info = get_client_info("GoogleGenerativeAIEmbeddings")

         self.client = build_generative_service(
-            credentials=(self.credentials or None),
+            credentials=self.credentials,
             api_key=google_api_key,
             client_info=client_info,
-            client_options=(self.client_options or None),
+            client_options=self.client_options,
         )
         return self
diff --git a/libs/genai/langchain_google_genai/llms.py b/libs/genai/langchain_google_genai/llms.py
index 5913bff2..92554a3e 100644
--- a/libs/genai/langchain_google_genai/llms.py
+++ b/libs/genai/langchain_google_genai/llms.py
@@ -220,20 +220,20 @@ class GoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseLLM):
     @model_validator(mode="after")
     def validate_environment(self) -> Self:
         """Validates params and passes them to google-generativeai package."""
-        if self.credentials or None:
+        if self.credentials:
             genai.configure(
-                credentials=(self.credentials or None),
-                transport=(self.transport or None),
-                client_options=(self.client_options or None),
+                credentials=self.credentials,
+                transport=self.transport,
+                client_options=self.client_options,
             )
         else:
-            google_api_key = self.google_api_key or None
+            google_api_key = self.google_api_key
             if isinstance(google_api_key, SecretStr):
                 google_api_key = google_api_key.get_secret_value()
             genai.configure(
                 api_key=google_api_key,
-                transport=(self.transport or None),
-                client_options=(self.client_options or None),
+                transport=self.transport,
+                client_options=self.client_options,
             )

From 10cb97bad931b3f7c5ff613b464f65520741fb3e Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Fri, 6 Sep 2024 11:07:15 -0400
Subject: [PATCH 11/22] upgrade mypy

---
 libs/genai/poetry.lock    | 69 +++++++++++++++++++--------------------
 libs/genai/pyproject.toml |  2 +-
 2 files changed, 34 insertions(+), 37 deletions(-)

diff --git a/libs/genai/poetry.lock b/libs/genai/poetry.lock
index 1cc03631..519319b1 100644
--- a/libs/genai/poetry.lock
+++ b/libs/genai/poetry.lock
@@ -616,52 +616,49 @@ requests = ">=2,<3"

 [[package]]
 name = "mypy"
-version = "0.991"
+version = "1.11.2"
 description = "Optional static typing for Python"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"},
-    {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"},
-    {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"},
-    {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"},
-    {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"},
-    {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"},
-    {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"},
-    {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"},
-    {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"},
-    {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"},
-    {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"},
-    {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"},
-    {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"},
-    {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"},
-    {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"},
-    {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"},
-    {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"},
-    {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"},
-    {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"},
-    {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"},
-    {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"},
-    {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"},
-    {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"},
-    {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"},
-    {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"},
-    {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"},
-    {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"},
-    {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"},
-    {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"},
-    {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"},
+    {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"},
+    {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"},
+    {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"},
+    {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"},
+    {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"},
+    {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"},
+    {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"},
+    {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"},
+    {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"},
+    {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"},
+    {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"},
+    {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"},
+    {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"},
+    {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"},
+    {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"},
+    {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"},
+    {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"},
+    {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"},
+    {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"},
+    {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"},
+    {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"},
+    {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"},
+    {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"},
+    {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"},
+    {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"},
+    {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"},
+    {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"},
 ]

 [package.dependencies]
-mypy-extensions = ">=0.4.3"
+mypy-extensions = ">=1.0.0"
 tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
-typing-extensions = ">=3.10"
+typing-extensions = ">=4.6.0"

 [package.extras]
 dmypy = ["psutil (>=4.0)"]
 install-types = ["pip"]
-python2 = ["typed-ast (>=1.4.0,<2)"]
+mypyc = ["setuptools (>=50)"]
 reports = ["lxml"]

 [[package]]
@@ -1538,4 +1535,4 @@ images = ["pillow"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<4.0"
-content-hash = "dbd21956a77f5416a98b8be28da9feccd8fe83481bae76bb1a32e5defe388ba9"
+content-hash = "1ee875aa6ee4b42d3c5af1023953bb3e1a1f543bb13d6d0fd8c20d3676a7a8ff"
diff --git a/libs/genai/pyproject.toml b/libs/genai/pyproject.toml
index 5257dc7c..a5dac1f9 100644
--- a/libs/genai/pyproject.toml
+++ b/libs/genai/pyproject.toml
@@ -57,7 +57,7 @@ optional = true
 ruff = "^0.1.5"

 [tool.poetry.group.typing.dependencies]
-mypy = "^0.991"
+mypy = "^1.10"
 types-requests = "^2.28.11.5"
 types-google-cloud-ndb = "^2.2.0.1"
 types-pillow = "^10.1.0.2"
 types-protobuf = "^4.24.0.20240302"

From f375dc9975fa26eed12820d3b76e1e95aba6f6f9 Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Fri, 6 Sep 2024 11:30:45 -0400
Subject: [PATCH 12/22] fix type hints

---
 .../langchain_google_genai/_function_utils.py |  2 +-
 .../langchain_google_genai/chat_models.py     |  7 +++---
 .../langchain_google_genai/embeddings.py      |  7 +++---
 .../google_vector_store.py                    |  2 +-
 libs/genai/langchain_google_genai/llms.py     |  7 +++---
 .../integration_tests/test_chat_models.py     | 18 +++++++-------
 .../tests/integration_tests/test_llms.py      |  8 +++----
 .../tests/unit_tests/test_chat_models.py      | 24 +++++++++++--------
 libs/genai/tests/unit_tests/test_llms.py      |  7 ++++--
 9 files changed, 46 insertions(+), 36 deletions(-)

diff --git a/libs/genai/langchain_google_genai/_function_utils.py b/libs/genai/langchain_google_genai/_function_utils.py
index 067c3e96..aae9c907 100644
--- a/libs/genai/langchain_google_genai/_function_utils.py
+++ b/libs/genai/langchain_google_genai/_function_utils.py
@@ -205,7 +205,7 @@ def _format_to_gapic_function_declaration(
             function = cast(dict, tool)
             function["parameters"] = {}
     else:
-        if "parameters" in tool and tool["parameters"].get("properties"):
+        if "parameters" in tool and tool["parameters"].get("properties"):  # type: ignore[index]
             function = convert_to_openai_tool(cast(dict, tool))["function"]
         else:
             function = cast(dict, tool)
diff --git a/libs/genai/langchain_google_genai/chat_models.py b/libs/genai/langchain_google_genai/chat_models.py
index b78f2cee..431cd2b8 100644
--- a/libs/genai/langchain_google_genai/chat_models.py
+++ b/libs/genai/langchain_google_genai/chat_models.py
@@ -875,9 +875,10 @@ def validate_environment(self) -> Self:
         client_info = get_client_info("ChatGoogleGenerativeAI")
         google_api_key = None
         if not self.credentials:
-            google_api_key = self.google_api_key
-            if isinstance(google_api_key, SecretStr):
-                google_api_key = google_api_key.get_secret_value()
+            if isinstance(self.google_api_key, SecretStr):
+                google_api_key = self.google_api_key.get_secret_value()
+            else:
+                google_api_key = self.google_api_key
         transport: Optional[str] = self.transport
         self.client = genaix.build_generative_service(
             credentials=self.credentials,
diff --git a/libs/genai/langchain_google_genai/embeddings.py b/libs/genai/langchain_google_genai/embeddings.py
index 3a2c15c9..a920c7ff 100644
--- a/libs/genai/langchain_google_genai/embeddings.py
+++ b/libs/genai/langchain_google_genai/embeddings.py
@@ -86,9 +86,10 @@ class GoogleGenerativeAIEmbeddings(BaseModel, Embeddings):
     @model_validator(mode="after")
     def validate_environment(self) -> Self:
         """Validates params and passes them to google-generativeai package."""
-        google_api_key = self.google_api_key
-        if isinstance(google_api_key, SecretStr):
-            google_api_key = google_api_key.get_secret_value()
+        if isinstance(self.google_api_key, SecretStr):
+            google_api_key: Optional[str] = self.google_api_key.get_secret_value()
+        else:
+            google_api_key = self.google_api_key
         client_info = get_client_info("GoogleGenerativeAIEmbeddings")

         self.client = build_generative_service(
diff --git a/libs/genai/langchain_google_genai/google_vector_store.py b/libs/genai/langchain_google_genai/google_vector_store.py
index 84a415bb..ce584a88 100644
--- a/libs/genai/langchain_google_genai/google_vector_store.py
+++ b/libs/genai/langchain_google_genai/google_vector_store.py
@@ -467,7 +467,7 @@ def as_aqa(self, **kwargs: Any) -> Runnable[str, AqaOutput]:
         return (
             RunnablePassthrough[str]()
             | {
-                "prompt": RunnablePassthrough(),
+                "prompt": RunnablePassthrough(),  # type: ignore[dict-item]
                 "passages": self.as_retriever(),
             }
             | RunnableLambda(_toAqaInput)
diff --git a/libs/genai/langchain_google_genai/llms.py b/libs/genai/langchain_google_genai/llms.py
index 92554a3e..18e208ff 100644
--- a/libs/genai/langchain_google_genai/llms.py
+++ b/libs/genai/langchain_google_genai/llms.py
@@ -227,9 +227,10 @@ def validate_environment(self) -> Self:
                 client_options=self.client_options,
             )
         else:
-            google_api_key = self.google_api_key
-            if isinstance(google_api_key, SecretStr):
-                google_api_key = google_api_key.get_secret_value()
+            if isinstance(self.google_api_key, SecretStr):
+                google_api_key: Optional[str] = self.google_api_key.get_secret_value()
+            else:
+                google_api_key = self.google_api_key
             genai.configure(
                 api_key=google_api_key,
                 transport=self.transport,
diff --git a/libs/genai/tests/integration_tests/test_chat_models.py b/libs/genai/tests/integration_tests/test_chat_models.py
index ec5e8db7..cd965be8 100644
--- a/libs/genai/tests/integration_tests/test_chat_models.py
+++ b/libs/genai/tests/integration_tests/test_chat_models.py
@@ -2,7 +2,7 @@
 import asyncio
 import json
-from typing import Generator, List, Optional, Type
+from typing import Dict, Generator, List, Optional, Type

 import pytest
 from langchain_core.language_models import BaseChatModel
@@ -268,8 +268,8 @@ def test_generativeai_get_num_tokens_gemini() -> None:

 def test_safety_settings_gemini() -> None:
-    safety_settings = {
-        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE,
+    safety_settings: Dict[HarmCategory, HarmBlockThreshold] = {
+        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE  # type: ignore[dict-item]
     }
     # test with safety filters on bind
     llm = ChatGoogleGenerativeAI(temperature=0, model="gemini-pro").bind(
@@ -310,8 +310,8 @@ def search(

     tools = [search]

-    safety = {
-        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_ONLY_HIGH
+    safety: Dict[HarmCategory, HarmBlockThreshold] = {
+        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_ONLY_HIGH  # type: ignore[dict-item]
     }
     llm = ChatGoogleGenerativeAI(
         model="models/gemini-1.5-pro-latest", safety_settings=safety
@@ -382,8 +382,8 @@ class MyModel(BaseModel):
         name: str
         age: int

-    safety = {
-        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_ONLY_HIGH
+    safety: Dict[HarmCategory, HarmBlockThreshold] = {
+        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_ONLY_HIGH  # type: ignore[dict-item]
     }
     # Test .bind_tools with BaseModel
     message = HumanMessage(content="My name is Erick and I am 27 years old")
@@ -442,8 +442,8 @@ class MyModel(BaseModel):
         name: str
         age: int

-    safety = {
-        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_ONLY_HIGH
+    safety: Dict[HarmCategory, HarmBlockThreshold] = {
+        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_ONLY_HIGH  # type: ignore[dict-item]
     }
     llm = ChatGoogleGenerativeAI(model=model_name, safety_settings=safety)
     model = llm.with_structured_output(MyModel)
diff --git a/libs/genai/tests/integration_tests/test_llms.py b/libs/genai/tests/integration_tests/test_llms.py
index deb90b80..b816c35c 100644
--- a/libs/genai/tests/integration_tests/test_llms.py
+++ b/libs/genai/tests/integration_tests/test_llms.py
@@ -4,7 +4,7 @@
 valid API key.
 """

-from typing import Generator
+from typing import Dict, Generator

 import pytest
 from langchain_core.outputs import LLMResult
@@ -23,7 +23,7 @@ def test_google_generativeai_call(model_name: str) -> None:
     if model_name:
         llm = GoogleGenerativeAI(max_output_tokens=10, model=model_name)
     else:
-        llm = GoogleGenerativeAI(max_output_tokens=10)
+        llm = GoogleGenerativeAI(max_output_tokens=10)  # type: ignore[call-arg]
     output = llm("Say foo:")
     assert isinstance(output, str)
     assert llm._llm_type == "google_palm"
@@ -69,8 +69,8 @@ def test_safety_settings_gemini() -> None:
     assert len(output.generations[0]) > 0

     # safety filters
-    safety_settings = {
-        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE,
+    safety_settings: Dict[HarmCategory, HarmBlockThreshold] = {
+        HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE,  # type: ignore[dict-item]
     }

     # test with safety filters directly to generate
diff --git a/libs/genai/tests/unit_tests/test_chat_models.py b/libs/genai/tests/unit_tests/test_chat_models.py
index 5c5066d1..3d4e979e 100644
--- a/libs/genai/tests/unit_tests/test_chat_models.py
+++ b/libs/genai/tests/unit_tests/test_chat_models.py
@@ -61,7 +61,7 @@ def test_integration_initialization() -> None:
     """Test chat model initialization."""
     llm = ChatGoogleGenerativeAI(
         model="gemini-nano",
-        google_api_key=SecretStr("..."),
+        google_api_key=SecretStr("..."),  # type: ignore[call-arg]
         top_k=2,
         top_p=1,
         temperature=0.7,
@@ -77,7 +77,7 @@ def test_integration_initialization() -> None:

     llm = ChatGoogleGenerativeAI(
         model="gemini-nano",
-        google_api_key=SecretStr("..."),
+        google_api_key=SecretStr("..."),  # type: ignore[call-arg]
         max_output_tokens=10,
     )
     ls_params = llm._get_ls_params()
@@ -91,7 +91,7 @@ def test_integration_initialization() -> None:

     ChatGoogleGenerativeAI(
         model="gemini-nano",
-        google_api_key=SecretStr("..."),
+        api_key=SecretStr("..."),
         top_k=2,
         top_p=1,
         temperature=0.7,
@@ -105,13 +105,14 @@ def test_initialization_inside_threadpool() -> None:
         executor.submit(
             ChatGoogleGenerativeAI,
             model="gemini-nano",
-            google_api_key=SecretStr("secret-api-key"),
+            google_api_key=SecretStr("secret-api-key"),  # type: ignore[call-arg]
         ).result()


 def test_initalization_without_async() -> None:
     chat = ChatGoogleGenerativeAI(
-        model="gemini-nano", google_api_key=SecretStr("secret-api-key")
+        model="gemini-nano",
+        google_api_key=SecretStr("secret-api-key"),  # type: ignore[call-arg]
     )
     assert chat.async_client is None


 def test_initialization_with_async() -> None:
     async def initialize_chat_with_async_client() -> ChatGoogleGenerativeAI:
         return ChatGoogleGenerativeAI(
-            model="gemini-nano",
google_api_key=SecretStr("secret-api-key"), # type: ignore[call-arg] ) loop = asyncio.get_event_loop() @@ -129,14 +131,16 @@ async def initialize_chat_with_async_client() -> ChatGoogleGenerativeAI: def test_api_key_is_string() -> None: chat = ChatGoogleGenerativeAI( - model="gemini-nano", google_api_key=SecretStr("secret-api-key") + model="gemini-nano", + google_api_key=SecretStr("secret-api-key"), # type: ignore[call-arg] ) assert isinstance(chat.google_api_key, SecretStr) def test_api_key_masked_when_passed_via_constructor(capsys: CaptureFixture) -> None: chat = ChatGoogleGenerativeAI( - model="gemini-nano", google_api_key=SecretStr("secret-api-key") + model="gemini-nano", + google_api_key=SecretStr("secret-api-key"), # type: ignore[call-arg] ) print(chat.google_api_key, end="") # noqa: T201 captured = capsys.readouterr() @@ -289,7 +293,7 @@ def test_additional_headers_support(headers: Optional[Dict[str, str]]) -> None: ): chat = ChatGoogleGenerativeAI( model="gemini-pro", - google_api_key=param_secret_api_key, + google_api_key=param_secret_api_key, # type: ignore[call-arg] client_options=param_client_options, transport=param_transport, additional_headers=headers, @@ -565,7 +569,7 @@ def test_parse_response_candidate(raw_candidate: Dict, expected: AIMessage) -> N def test_serialize() -> None: - llm = ChatGoogleGenerativeAI(model="gemini-pro-1.5", google_api_key="test-key") + llm = ChatGoogleGenerativeAI(model="gemini-pro-1.5", google_api_key="test-key") # type: ignore[call-arg] serialized = dumps(llm) llm_loaded = loads( serialized, diff --git a/libs/genai/tests/unit_tests/test_llms.py b/libs/genai/tests/unit_tests/test_llms.py index c4e4327f..29a68980 100644 --- a/libs/genai/tests/unit_tests/test_llms.py +++ b/libs/genai/tests/unit_tests/test_llms.py @@ -10,7 +10,7 @@ def test_model_family() -> None: def test_tracing_params() -> None: # Test standard tracing params - llm = GoogleGenerativeAI(model="gemini-pro", google_api_key="foo") + llm = GoogleGenerativeAI(model="gemini-pro", google_api_key="foo") # type: ignore[call-arg] ls_params = llm._get_ls_params() assert ls_params == { "ls_provider": "google_genai", @@ -20,7 +20,10 @@ def test_tracing_params() -> None: } llm = GoogleGenerativeAI( - model="gemini-pro", temperature=0.1, max_output_tokens=10, google_api_key="foo" + model="gemini-pro", + temperature=0.1, + max_output_tokens=10, + google_api_key="foo", # type: ignore[call-arg] ) ls_params = llm._get_ls_params() assert ls_params == { From d0441e820cdc409f15036b8fb67a18bfca621431 Mon Sep 17 00:00:00 2001 From: Chester Curme Date: Fri, 6 Sep 2024 11:40:37 -0400 Subject: [PATCH 13/22] update serialization test --- libs/genai/tests/unit_tests/test_chat_models.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/libs/genai/tests/unit_tests/test_chat_models.py b/libs/genai/tests/unit_tests/test_chat_models.py index 3d4e979e..6d756feb 100644 --- a/libs/genai/tests/unit_tests/test_chat_models.py +++ b/libs/genai/tests/unit_tests/test_chat_models.py @@ -576,4 +576,7 @@ def test_serialize() -> None: secrets_map={"GOOGLE_API_KEY": "test-key"}, valid_namespaces=["langchain_google_genai"], ) + # Pydantic 2 equality will fail on complex attributes like clients with different IDs + llm.client = None + llm_loaded.client = None assert llm == llm_loaded From c6dd7455eb7d49b8a353dca4bdcf8bd0bdc6f086 Mon Sep 17 00:00:00 2001 From: Chester Curme Date: Fri, 6 Sep 2024 11:52:56 -0400 Subject: [PATCH 14/22] add test_watch to makefile --- libs/genai/Makefile | 3 +++ 1 file changed, 3 insertions(+) diff 
--git a/libs/genai/Makefile b/libs/genai/Makefile index c0729645..7c4a6e58 100644 --- a/libs/genai/Makefile +++ b/libs/genai/Makefile @@ -14,6 +14,9 @@ test tests integration_test integration_tests: check_imports: $(shell find langchain_google_genai -name '*.py') poetry run python ./scripts/check_imports.py $^ +test_watch: + poetry run ptw --snapshot-update --now . -- -vv $(TEST_FILE) + # Run unit tests and generate a coverage report. coverage: poetry run pytest --cov \ From c096fb855aa6144f4032606d855f791f89389dd2 Mon Sep 17 00:00:00 2001 From: Chester Curme Date: Fri, 6 Sep 2024 12:32:45 -0400 Subject: [PATCH 15/22] merge --- libs/genai/tests/integration_tests/test_chat_models.py | 3 +-- libs/genai/tests/unit_tests/test_chat_models.py | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/libs/genai/tests/integration_tests/test_chat_models.py b/libs/genai/tests/integration_tests/test_chat_models.py index a4fada0a..a3ce80a4 100644 --- a/libs/genai/tests/integration_tests/test_chat_models.py +++ b/libs/genai/tests/integration_tests/test_chat_models.py @@ -2,7 +2,7 @@ import asyncio import json -from typing import Dict, Generator, List, Optional, Type +from typing import Dict, Generator, List, Optional import pytest from langchain_core.messages import ( @@ -15,7 +15,6 @@ ToolMessage, ) from langchain_core.tools import tool -from langchain_standard_tests.integration_tests import ChatModelIntegrationTests from pydantic import BaseModel from langchain_google_genai import ( diff --git a/libs/genai/tests/unit_tests/test_chat_models.py b/libs/genai/tests/unit_tests/test_chat_models.py index d261c3de..d23bfc31 100644 --- a/libs/genai/tests/unit_tests/test_chat_models.py +++ b/libs/genai/tests/unit_tests/test_chat_models.py @@ -23,7 +23,6 @@ ToolMessage, ) from langchain_core.messages.tool import tool_call as create_tool_call -from langchain_standard_tests.unit_tests import ChatModelUnitTests from pydantic import SecretStr from pytest import CaptureFixture @@ -553,7 +552,8 @@ def test_serialize() -> None: secrets_map={"GOOGLE_API_KEY": "test-key"}, valid_namespaces=["langchain_google_genai"], ) - # Pydantic 2 equality will fail on complex attributes like clients with different IDs + # Pydantic 2 equality will fail on complex attributes like clients with + # different IDs llm.client = None llm_loaded.client = None assert llm == llm_loaded From 44b0c3a57339744af302894a15328ed9ed4efd27 Mon Sep 17 00:00:00 2001 From: Chester Curme Date: Fri, 6 Sep 2024 13:06:45 -0400 Subject: [PATCH 16/22] add snapshots --- .../__snapshots__/test_standard.ambr | 129 ++++++++++++++++++ libs/genai/tests/unit_tests/test_standard.py | 18 ++- 2 files changed, 146 insertions(+), 1 deletion(-) create mode 100644 libs/genai/tests/unit_tests/__snapshots__/test_standard.ambr diff --git a/libs/genai/tests/unit_tests/__snapshots__/test_standard.ambr b/libs/genai/tests/unit_tests/__snapshots__/test_standard.ambr new file mode 100644 index 00000000..c948d228 --- /dev/null +++ b/libs/genai/tests/unit_tests/__snapshots__/test_standard.ambr @@ -0,0 +1,129 @@ +# serializer version: 1 +# name: TestGeminiAIStandard.test_serdes[serialized] + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'ChatGoogleGenerativeAIInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain_google_genai', + 'chat_models', + 'ChatGoogleGenerativeAI', + ]), + 'name': 
'ChatGoogleGenerativeAI', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'ChatGoogleGenerativeAIOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain_google_genai', + 'chat_models', + 'ChatGoogleGenerativeAI', + ]), + 'kwargs': dict({ + 'default_metadata': list([ + ]), + 'google_api_key': dict({ + 'id': list([ + 'GOOGLE_API_KEY', + ]), + 'lc': 1, + 'type': 'secret', + }), + 'max_retries': 2, + 'model': 'models/gemini-1.0-pro-001', + 'n': 1, + 'temperature': 0.0, + 'timeout': 60.0, + }), + 'lc': 1, + 'name': 'ChatGoogleGenerativeAI', + 'type': 'constructor', + }) +# --- +# name: TestGemini_15_AIStandard.test_serdes[serialized] + dict({ + 'graph': dict({ + 'edges': list([ + dict({ + 'source': 0, + 'target': 1, + }), + dict({ + 'source': 1, + 'target': 2, + }), + ]), + 'nodes': list([ + dict({ + 'data': 'ChatGoogleGenerativeAIInput', + 'id': 0, + 'type': 'schema', + }), + dict({ + 'data': dict({ + 'id': list([ + 'langchain_google_genai', + 'chat_models', + 'ChatGoogleGenerativeAI', + ]), + 'name': 'ChatGoogleGenerativeAI', + }), + 'id': 1, + 'type': 'runnable', + }), + dict({ + 'data': 'ChatGoogleGenerativeAIOutput', + 'id': 2, + 'type': 'schema', + }), + ]), + }), + 'id': list([ + 'langchain_google_genai', + 'chat_models', + 'ChatGoogleGenerativeAI', + ]), + 'kwargs': dict({ + 'default_metadata': list([ + ]), + 'google_api_key': dict({ + 'id': list([ + 'GOOGLE_API_KEY', + ]), + 'lc': 1, + 'type': 'secret', + }), + 'max_retries': 2, + 'model': 'models/gemini-1.5-pro-001', + 'n': 1, + 'temperature': 0.0, + 'timeout': 60.0, + }), + 'lc': 1, + 'name': 'ChatGoogleGenerativeAI', + 'type': 'constructor', + }) +# --- diff --git a/libs/genai/tests/unit_tests/test_standard.py b/libs/genai/tests/unit_tests/test_standard.py index fd8b79cc..098bf053 100644 --- a/libs/genai/tests/unit_tests/test_standard.py +++ b/libs/genai/tests/unit_tests/test_standard.py @@ -1,4 +1,4 @@ -from typing import Type +from typing import Tuple, Type from langchain_core.language_models import BaseChatModel from langchain_standard_tests.unit_tests import ChatModelUnitTests @@ -15,6 +15,14 @@ def chat_model_class(self) -> Type[BaseChatModel]: def chat_model_params(self) -> dict: return {"model": "models/gemini-1.0-pro-001"} + @property + def init_from_env_params(self) -> Tuple[dict, dict, dict]: + return ( + {"GOOGLE_API_KEY": "api_key"}, + self.chat_model_params, + {"google_api_key": "api_key"}, + ) + class TestGemini_15_AIStandard(ChatModelUnitTests): @property @@ -24,3 +32,11 @@ def chat_model_class(self) -> Type[BaseChatModel]: @property def chat_model_params(self) -> dict: return {"model": "models/gemini-1.5-pro-001"} + + @property + def init_from_env_params(self) -> Tuple[dict, dict, dict]: + return ( + {"GOOGLE_API_KEY": "api_key"}, + self.chat_model_params, + {"google_api_key": "api_key"}, + ) From 80b8348b9077a6a5862f654676ac04267b402059 Mon Sep 17 00:00:00 2001 From: Chester Curme Date: Fri, 6 Sep 2024 16:37:21 -0400 Subject: [PATCH 17/22] schema -> model_json_schema --- .../langchain_google_genai/_function_utils.py | 20 +++++++++++++++++-- .../integration_tests/test_chat_models.py | 6 +++++- 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/libs/genai/langchain_google_genai/_function_utils.py b/libs/genai/langchain_google_genai/_function_utils.py index aae9c907..5ac6c52c 100644 --- a/libs/genai/langchain_google_genai/_function_utils.py +++ b/libs/genai/langchain_google_genai/_function_utils.py @@ -30,6 +30,7 @@ ) from langchain_core.utils.json_schema import 
dereference_refs from pydantic import BaseModel +from pydantic.v1 import BaseModel as BaseModelV1 logger = logging.getLogger(__name__) @@ -232,7 +233,14 @@ def _format_base_tool_to_function_declaration( ), ) - schema = tool.args_schema.schema() + if issubclass(tool.args_schema, BaseModel): + schema = tool.args_schema.model_json_schema() + elif issubclass(tool.args_schema, BaseModelV1): + schema = tool.args_schema.schema() + else: + raise NotImplementedError( + f"args_schema must be a Pydantic BaseModel, got {tool.args_schema}." + ) parameters = _dict_to_gapic_schema(schema) return gapic.FunctionDeclaration( @@ -247,7 +255,15 @@ def _convert_pydantic_to_genai_function( tool_name: Optional[str] = None, tool_description: Optional[str] = None, ) -> gapic.FunctionDeclaration: - schema = dereference_refs(pydantic_model.schema()) + if issubclass(pydantic_model, BaseModel): + schema = pydantic_model.model_json_schema() + elif issubclass(pydantic_model, BaseModelV1): + schema = pydantic_model.schema() + else: + raise NotImplementedError( + f"pydantic_model must be a Pydantic BaseModel, got {pydantic_model}" + ) + schema = dereference_refs(schema) schema.pop("definitions", None) function_declaration = gapic.FunctionDeclaration( name=tool_name if tool_name else schema.get("title"), diff --git a/libs/genai/tests/integration_tests/test_chat_models.py b/libs/genai/tests/integration_tests/test_chat_models.py index a3ce80a4..dc62d09a 100644 --- a/libs/genai/tests/integration_tests/test_chat_models.py +++ b/libs/genai/tests/integration_tests/test_chat_models.py @@ -430,7 +430,11 @@ class MyModel(BaseModel): assert response == MyModel(name="Erick", age=27) model = llm.with_structured_output( - {"name": "MyModel", "description": "MyModel", "parameters": MyModel.schema()} + { + "name": "MyModel", + "description": "MyModel", + "parameters": MyModel.model_json_schema(), + } ) response = model.invoke([message]) expected = [ From 2589fd68abf99f30a4061bf12d33700b2dc66180 Mon Sep 17 00:00:00 2001 From: Chester Curme Date: Sun, 8 Sep 2024 10:57:59 -0400 Subject: [PATCH 18/22] add integration test workflow --- .github/workflows/_integration_test.yml | 64 +++++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 .github/workflows/_integration_test.yml diff --git a/.github/workflows/_integration_test.yml b/.github/workflows/_integration_test.yml new file mode 100644 index 00000000..0c680c55 --- /dev/null +++ b/.github/workflows/_integration_test.yml @@ -0,0 +1,64 @@ +name: Integration tests + +on: + workflow_dispatch: + inputs: + working-directory: + required: true + type: string + python-version: + required: true + type: string + description: "Python version to use" + +env: + POETRY_VERSION: "1.7.1" + +jobs: + build: + defaults: + run: + working-directory: ${{ inputs.working-directory }} + runs-on: ubuntu-latest + name: Python ${{ inputs.python-version }} + steps: + - uses: actions/checkout@v4 + + - name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }} + uses: "./.github/actions/poetry_setup" + with: + python-version: ${{ inputs.python-version }} + poetry-version: ${{ env.POETRY_VERSION }} + working-directory: ${{ inputs.working-directory }} + cache-key: core + + - name: Install dependencies + shell: bash + run: poetry install --with test,test_integration + + - name: 'Authenticate to Google Cloud' + id: 'auth' + uses: google-github-actions/auth@v2 + with: + credentials_json: '${{ secrets.GOOGLE_CREDENTIALS }}' + + - name: Run integration tests + shell: bash + env: + 
GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }} + GOOGLE_SEARCH_API_KEY: ${{ secrets.GOOGLE_SEARCH_API_KEY }} + GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }} + run: | + make integration_tests + + - name: Ensure the tests did not create any additional files + shell: bash + run: | + set -eu + + STATUS="$(git status)" + echo "$STATUS" + + # grep will exit non-zero if the target message isn't found, + # and `set -e` above will cause the step to fail. + echo "$STATUS" | grep 'nothing to commit, working tree clean' From 6844a2461828505ce84309673b1aec1785447f75 Mon Sep 17 00:00:00 2001 From: Chester Curme Date: Sun, 8 Sep 2024 10:58:59 -0400 Subject: [PATCH 19/22] Revert "add integration test workflow" This reverts commit 2589fd68abf99f30a4061bf12d33700b2dc66180. --- .github/workflows/_integration_test.yml | 64 ------------------------- 1 file changed, 64 deletions(-) delete mode 100644 .github/workflows/_integration_test.yml diff --git a/.github/workflows/_integration_test.yml b/.github/workflows/_integration_test.yml deleted file mode 100644 index 0c680c55..00000000 --- a/.github/workflows/_integration_test.yml +++ /dev/null @@ -1,64 +0,0 @@ -name: Integration tests - -on: - workflow_dispatch: - inputs: - working-directory: - required: true - type: string - python-version: - required: true - type: string - description: "Python version to use" - -env: - POETRY_VERSION: "1.7.1" - -jobs: - build: - defaults: - run: - working-directory: ${{ inputs.working-directory }} - runs-on: ubuntu-latest - name: Python ${{ inputs.python-version }} - steps: - - uses: actions/checkout@v4 - - - name: Set up Python ${{ inputs.python-version }} + Poetry ${{ env.POETRY_VERSION }} - uses: "./.github/actions/poetry_setup" - with: - python-version: ${{ inputs.python-version }} - poetry-version: ${{ env.POETRY_VERSION }} - working-directory: ${{ inputs.working-directory }} - cache-key: core - - - name: Install dependencies - shell: bash - run: poetry install --with test,test_integration - - - name: 'Authenticate to Google Cloud' - id: 'auth' - uses: google-github-actions/auth@v2 - with: - credentials_json: '${{ secrets.GOOGLE_CREDENTIALS }}' - - - name: Run integration tests - shell: bash - env: - GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }} - GOOGLE_SEARCH_API_KEY: ${{ secrets.GOOGLE_SEARCH_API_KEY }} - GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }} - run: | - make integration_tests - - - name: Ensure the tests did not create any additional files - shell: bash - run: | - set -eu - - STATUS="$(git status)" - echo "$STATUS" - - # grep will exit non-zero if the target message isn't found, - # and `set -e` above will cause the step to fail. 
- echo "$STATUS" | grep 'nothing to commit, working tree clean' From 6828ddb74a498b3aa173a5ffa8c759b99c94ca2b Mon Sep 17 00:00:00 2001 From: Chester Curme Date: Sun, 8 Sep 2024 11:06:17 -0400 Subject: [PATCH 20/22] lock --- libs/genai/poetry.lock | 30 ++++++++++++++---------------- libs/genai/pyproject.toml | 10 +++++----- 2 files changed, 19 insertions(+), 21 deletions(-) diff --git a/libs/genai/poetry.lock b/libs/genai/poetry.lock index 519319b1..127eb6fb 100644 --- a/libs/genai/poetry.lock +++ b/libs/genai/poetry.lock @@ -547,7 +547,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.3.0.dev2" +version = "0.3.0.dev4" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.9,<4.0" @@ -556,12 +556,9 @@ develop = false [package.dependencies] jsonpatch = "^1.33" -langsmith = "^0.1.75" +langsmith = "^0.1.112" packaging = ">=23.2,<25" -pydantic = [ - {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, - {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, -] +pydantic = "^2.7.4" PyYAML = ">=5.3" tenacity = "^8.1.0,!=8.4.0" typing-extensions = ">=4.7" @@ -569,8 +566,8 @@ typing-extensions = ">=4.7" [package.source] type = "git" url = "https://github.com/langchain-ai/langchain.git" -reference = "v0.3/dev_releases" -resolved_reference = "70f9d5a1f36114834aa6ae80b66ee21dd302b2a4" +reference = "v0.3rc" +resolved_reference = "7a57b4fbbfa6470c0c191e6e8de6af36597ea89d" subdirectory = "libs/core" [[package]] @@ -591,26 +588,27 @@ syrupy = "^4" [package.source] type = "git" url = "https://github.com/langchain-ai/langchain.git" -reference = "v0.3/dev_releases" -resolved_reference = "70f9d5a1f36114834aa6ae80b66ee21dd302b2a4" +reference = "v0.3rc" +resolved_reference = "7a57b4fbbfa6470c0c191e6e8de6af36597ea89d" subdirectory = "libs/standard-tests" [[package]] name = "langsmith" -version = "0.1.99" +version = "0.1.116" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.99-py3-none-any.whl", hash = "sha256:ef8d1d74a2674c514aa429b0171a9fbb661207dc3835142cca0e8f1bf97b26b0"}, - {file = "langsmith-0.1.99.tar.gz", hash = "sha256:b5c6a1f158abda61600a4a445081ee848b4a28b758d91f2793dc02aeffafcaf1"}, + {file = "langsmith-0.1.116-py3-none-any.whl", hash = "sha256:4b5ea64c81ba5ca309695c85dc3fb4617429a985129ed5d9eca00d1c9d6483f4"}, + {file = "langsmith-0.1.116.tar.gz", hash = "sha256:5ccd7f5c1840f7c507ab3ee56334a1391de28c8bf72669782e2d82cafeefffa7"}, ] [package.dependencies] +httpx = ">=0.23.0,<1" orjson = ">=3.9.14,<4.0.0" pydantic = [ - {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, + {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, ] requests = ">=2,<3" @@ -983,8 +981,8 @@ files = [ annotated-types = ">=0.4.0" pydantic-core = "2.20.1" typing-extensions = [ - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, ] [package.extras] @@ -1535,4 +1533,4 @@ images = ["pillow"] [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "1ee875aa6ee4b42d3c5af1023953bb3e1a1f543bb13d6d0fd8c20d3676a7a8ff" +content-hash = "58167fdcf43aca65e0f09b8e87a6b226cd37a474710fd4c9920af8a1640fe891" diff --git a/libs/genai/pyproject.toml b/libs/genai/pyproject.toml index a5dac1f9..c5a0f126 100644 --- a/libs/genai/pyproject.toml +++ b/libs/genai/pyproject.toml @@ -12,7 +12,7 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.9,<4.0" -langchain-core = { version = "^0.3.0.dev2", allow-prereleases = true } +langchain-core = { version = "^0.3.0.dev4", allow-prereleases = true } google-generativeai = "^0.7.0" pillow = { version = "^10.1.0", optional = true } pydantic = ">=2,<3" @@ -31,8 +31,8 @@ syrupy = "^4.0.2" pytest-watcher = "^0.3.4" pytest-asyncio = "^0.21.1" numpy = "^1.26.2" -langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core", branch = "v0.3/dev_releases" } -langchain-standard-tests = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/standard-tests", branch = "v0.3/dev_releases" } +langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core", branch = "v0.3rc" } +langchain-standard-tests = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/standard-tests", branch = "v0.3rc" } [tool.codespell] ignore-words-list = "rouge" @@ -62,7 +62,7 @@ types-requests = "^2.28.11.5" types-google-cloud-ndb = "^2.2.0.1" types-pillow = "^10.1.0.2" types-protobuf = "^4.24.0.20240302" -langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core", branch = "v0.3/dev_releases" } +langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core", branch = "v0.3rc" } numpy = "^1.26.2" [tool.poetry.group.dev] @@ -73,7 +73,7 @@ pillow = "^10.1.0" types-requests = "^2.31.0.10" types-pillow = "^10.1.0.2" types-google-cloud-ndb = "^2.2.0.1" -langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core", branch = "v0.3/dev_releases" } +langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core", branch = "v0.3rc" } [tool.ruff.lint] select = [ From 
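Note on the _function_utils changes in PATCH 17/22 above: tools may still arrive as
either Pydantic v1 or v2 models, so the conversion helpers branch on the base class
before producing a JSON schema. A standalone sketch of that branching follows;
json_schema_for is an illustrative helper, not the repo's actual function.

    from pydantic import BaseModel
    from pydantic.v1 import BaseModel as BaseModelV1

    def json_schema_for(model: type) -> dict:
        if issubclass(model, BaseModel):  # Pydantic v2 models
            return model.model_json_schema()
        if issubclass(model, BaseModelV1):  # legacy models built on pydantic.v1
            return model.schema()
        raise NotImplementedError(f"Expected a Pydantic model, got {model}")

In the patch itself the resulting dict then feeds dereference_refs and
_dict_to_gapic_schema, as shown in the hunks above.
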
46675b0fb6344398e35de81680ca2ca7d779df33 Mon Sep 17 00:00:00 2001 From: Chester Curme Date: Mon, 9 Sep 2024 12:25:29 -0400 Subject: [PATCH 21/22] increment version to 2.0.0.dev1 --- libs/genai/pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/genai/pyproject.toml b/libs/genai/pyproject.toml index c5a0f126..f632fdf5 100644 --- a/libs/genai/pyproject.toml +++ b/libs/genai/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain-google-genai" -version = "1.0.10" +version = "2.0.0.dev1" description = "An integration package connecting Google's genai package and LangChain" authors = [] readme = "README.md" From 290a90b1ae6c92b44d9370c1e7a257894235b30f Mon Sep 17 00:00:00 2001 From: Chester Curme Date: Mon, 9 Sep 2024 16:22:23 -0400 Subject: [PATCH 22/22] update docstrings --- libs/genai/langchain_google_genai/chat_models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/genai/langchain_google_genai/chat_models.py b/libs/genai/langchain_google_genai/chat_models.py index 431cd2b8..80204c72 100644 --- a/libs/genai/langchain_google_genai/chat_models.py +++ b/libs/genai/langchain_google_genai/chat_models.py @@ -703,7 +703,7 @@ class ChatGoogleGenerativeAI(_BaseGoogleGenerativeAI, BaseChatModel): Tool calling: .. code-block:: python - from langchain_core.pydantic_v1 import BaseModel, Field + from pydantic import BaseModel, Field class GetWeather(BaseModel): @@ -748,7 +748,7 @@ class GetPopulation(BaseModel): from typing import Optional - from langchain_core.pydantic_v1 import BaseModel, Field + from pydantic import BaseModel, Field class Joke(BaseModel):