Skip to content

Commit

Permalink
genai: update pyproject.toml same as vertexai
Browse files · Browse the repository at this point in the history
  • Loading branch information
nobu007 committed Nov 19, 2024
1 parent 9f520cd commit 5779ee2
Show file tree
Hide file tree
Showing 13 changed files with 117 additions and 63 deletions.
14 changes: 8 additions & 6 deletions libs/genai/langchain_google_genai/_function_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,10 @@

import google.ai.generativelanguage as glm
import google.ai.generativelanguage_v1beta.types as gapic
import proto # type: ignore[import]
from google.generativeai.types.content_types import ToolDict # type: ignore[import]
import proto # type: ignore[import-untyped]
from google.generativeai.types.content_types import ( # type: ignore[import-untyped]
ToolDict,
)
from langchain_core.tools import BaseTool
from langchain_core.tools import tool as callable_as_lc_tool
from langchain_core.utils.function_calling import (
Expand Down Expand Up @@ -144,7 +146,7 @@ def convert_to_genai_function_declarations(
tools: Sequence[_ToolsType],
) -> gapic.Tool:
if not isinstance(tools, collections.abc.Sequence):
logger.warning(
logger.warning( # type: ignore[unreachable]
"convert_to_genai_function_declarations expects a Sequence "
"and not a single tool."
)
Expand Down Expand Up @@ -237,7 +239,7 @@ def _format_base_tool_to_function_declaration(

if issubclass(tool.args_schema, BaseModel):
schema = tool.args_schema.model_json_schema()
elif issubclass(tool.args_schema, BaseModelV1):
elif issubclass(tool.args_schema, BaseModelV1): # type: ignore[unreachable]
schema = tool.args_schema.schema()
else:
raise NotImplementedError(
Expand All @@ -259,7 +261,7 @@ def _convert_pydantic_to_genai_function(
) -> gapic.FunctionDeclaration:
if issubclass(pydantic_model, BaseModel):
schema = pydantic_model.model_json_schema()
elif issubclass(pydantic_model, BaseModelV1):
elif issubclass(pydantic_model, BaseModelV1): # type: ignore[unreachable]
schema = pydantic_model.schema()
else:
raise NotImplementedError(
Expand Down Expand Up @@ -460,7 +462,7 @@ def _tool_choice_to_tool_config(
def is_basemodel_subclass_safe(tool: Type) -> bool:
if safe_import("langchain_core.utils.pydantic", "is_basemodel_subclass"):
from langchain_core.utils.pydantic import (
is_basemodel_subclass, # type: ignore[import]
is_basemodel_subclass,
)

return is_basemodel_subclass(tool)
Expand Down
2 changes: 1 addition & 1 deletion libs/genai/langchain_google_genai/_genai_extension.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as gapi_exception
from google.api_core import gapic_v1
from google.auth import credentials, exceptions # type: ignore
from google.auth import credentials, exceptions
from google.protobuf import timestamp_pb2

_logger = logging.getLogger(__name__)
Expand Down
2 changes: 1 addition & 1 deletion libs/genai/langchain_google_genai/_image_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ def load_bytes(self, image_string: str) -> bytes:
"Please pass in images as Google Cloud Storage URI, "
"b64 encoded image string (data:image/...), or valid image url."
)
return self._bytes_from_file(image_string)
return self._bytes_from_file(image_string) # type: ignore[unreachable]

raise ValueError(
"Image string must be one of: Google Cloud Storage URI, "
Expand Down
20 changes: 10 additions & 10 deletions libs/genai/langchain_google_genai/chat_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
import google.api_core

# TODO: remove ignore once the google package is published with types
import proto # type: ignore[import]
import proto # type: ignore[import-untyped]
from google.ai.generativelanguage_v1beta import (
GenerativeServiceAsyncClient as v1betaGenerativeServiceAsyncClient,
)
Expand All @@ -44,10 +44,10 @@
ToolConfig,
VideoMetadata,
)
from google.generativeai.caching import CachedContent # type: ignore[import]
from google.generativeai.types import Tool as GoogleTool # type: ignore[import]
from google.generativeai.caching import CachedContent # type: ignore[import-untyped]
from google.generativeai.types import Tool as GoogleTool # type: ignore[import-untyped]
from google.generativeai.types import caching_types, content_types
from google.generativeai.types.content_types import ( # type: ignore[import]
from google.generativeai.types.content_types import ( # type: ignore[import-untyped]
FunctionDeclarationType,
ToolDict,
)
Expand Down Expand Up @@ -212,7 +212,7 @@ async def _achat_with_retry(generation_method: Callable, **kwargs: Any) -> Any:
Any: The result from the chat generation method.
"""
retry_decorator = _create_retry_decorator()
from google.api_core.exceptions import InvalidArgument # type: ignore
from google.api_core.exceptions import InvalidArgument

@retry_decorator
async def _achat_with_retry(**kwargs: Any) -> Any:
Expand Down Expand Up @@ -787,10 +787,10 @@ class Joke(BaseModel):
raise an error."""

cached_content: Optional[str] = None
"""The name of the cached content used as context to serve the prediction.
"""The name of the cached content used as context to serve the prediction.
Note: only used in explicit caching, where users can have control over caching
(e.g. what content to cache) and enjoy guaranteed cost savings. Format:
Note: only used in explicit caching, where users can have control over caching
(e.g. what content to cache) and enjoy guaranteed cost savings. Format:
``cachedContents/{cachedContent}``.
"""

Expand Down Expand Up @@ -1275,7 +1275,7 @@ def bind_tools(
f"both:\n\n{tool_choice=}\n\n{tool_config=}"
)
try:
formatted_tools: list = [convert_to_openai_tool(tool) for tool in tools] # type: ignore[arg-type]
formatted_tools: list = [convert_to_openai_tool(tool) for tool in tools]
except Exception:
formatted_tools = [
tool_to_dict(convert_to_genai_function_declarations(tools))
Expand Down Expand Up @@ -1381,4 +1381,4 @@ def _get_tool_name(
tool: Union[ToolDict, GoogleTool],
) -> str:
genai_tool = tool_to_dict(convert_to_genai_function_declarations([tool]))
return [f["name"] for f in genai_tool["function_declarations"]][0] # type: ignore[index]
return [f["name"] for f in genai_tool["function_declarations"]][0]
13 changes: 5 additions & 8 deletions libs/genai/langchain_google_genai/llms.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
from typing import Any, Callable, Dict, Iterator, List, Optional, Union

import google.api_core
import google.generativeai as genai # type: ignore[import]
import google.generativeai as genai # type: ignore[import-untyped]
from langchain_core.callbacks import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
Expand All @@ -16,10 +16,7 @@
from pydantic import BaseModel, ConfigDict, Field, SecretStr, model_validator
from typing_extensions import Self

from langchain_google_genai._enums import (
HarmBlockThreshold,
HarmCategory,
)
from langchain_google_genai._enums import HarmBlockThreshold, HarmCategory


class GoogleModelFamily(str, Enum):
Expand Down Expand Up @@ -170,9 +167,9 @@ class _BaseGoogleGenerativeAI(BaseModel):
)

safety_settings: Optional[Dict[HarmCategory, HarmBlockThreshold]] = None
"""The default safety settings to use for all generations.
For example:
"""The default safety settings to use for all generations.
For example:
from google.generativeai.types.safety_types import HarmBlockThreshold, HarmCategory
Expand Down
53 changes: 47 additions & 6 deletions libs/genai/poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

24 changes: 21 additions & 3 deletions libs/genai/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ python = ">=3.9,<4.0"
langchain-core = ">=0.3.15,<0.4"
google-generativeai = "^0.8.0"
pydantic = ">=2,<3"
grpcio = "^1.66.2"

[tool.poetry.group.test]
optional = true
Expand All @@ -26,8 +27,15 @@ pytest-mock = "^3.10.0"
syrupy = "^4.0.2"
pytest-watcher = "^0.3.4"
pytest-asyncio = "^0.21.1"
numpy = "^1.26.2"
types-requests = "^2.31.0.20231231"
types-protobuf = "^4.24.0.4"
numexpr = "^2.8.6"
numpy = [
{ version = "^1", python = "<3.12" },
{ version = "^1.26.2", python = ">=3.12" },
]
langchain-tests = "0.3.1"
google-generativeai = "^0.8.0"

[tool.codespell]
ignore-words-list = "rouge"
Expand Down Expand Up @@ -59,7 +67,6 @@ mypy = "^1.10"
types-requests = "^2.28.11.5"
types-google-cloud-ndb = "^2.2.0.1"
types-protobuf = "^4.24.0.20240302"
numpy = "^1.26.2"


[tool.poetry.group.dev]
Expand All @@ -68,6 +75,7 @@ optional = true
[tool.poetry.group.dev.dependencies]
types-requests = "^2.31.0.10"
types-google-cloud-ndb = "^2.2.0.1"
langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core" }

[tool.ruff.lint]
select = [
Expand All @@ -77,7 +85,17 @@ select = [
]

[tool.mypy]
disallow_untyped_defs = "True"
disallow_untyped_defs = true
check_untyped_defs = true
error_summary = false
pretty = true
show_column_numbers = true
show_error_codes = true
show_error_context = true
warn_redundant_casts = true
warn_unreachable = true
warn_unused_configs = true
warn_unused_ignores = true

[tool.coverage.run]
omit = ["tests/*"]
Expand Down
7 changes: 2 additions & 5 deletions libs/genai/tests/integration_tests/test_chat_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,8 @@
from langchain_core.tools import tool
from pydantic import BaseModel

from langchain_google_genai import (
ChatGoogleGenerativeAI,
HarmBlockThreshold,
HarmCategory,
)
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_google_genai._enums import HarmBlockThreshold, HarmCategory
from langchain_google_genai.chat_models import ChatGoogleGenerativeAIError

_MODEL = "models/gemini-1.0-pro-001" # TODO: Use nano when it's available.
Expand Down
2 changes: 1 addition & 1 deletion libs/genai/tests/integration_tests/test_llms.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ def test_google_generativeai_call(model_name: str) -> None:
if model_name:
llm = GoogleGenerativeAI(max_tokens=10, model=model_name)
else:
llm = GoogleGenerativeAI(max_tokens=10) # type: ignore[call-arg]
llm = GoogleGenerativeAI(max_tokens=10, model=model_names[0])
output = llm("Say foo:")
assert isinstance(output, str)
assert llm._llm_type == "google_palm"
Expand Down
Loading

0 comments on commit 5779ee2

Please sign in to comment.