Skip to content

Commit

Permalink
Merge branch 'main' into fix-missing-scope-exception
Browse files Browse the repository at this point in the history
  • Loading branch information
lkuligin authored Nov 28, 2024
2 parents 67c11db + 7f704aa commit 02e7c2d
Show file tree
Hide file tree
Showing 19 changed files with 712 additions and 649 deletions.
7 changes: 7 additions & 0 deletions libs/community/langchain_google_community/drive.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@ class GoogleDriveLoader(BaseLoader, BaseModel):
"""Path to the credentials file."""
token_path: Path = Path.home() / ".credentials" / "token.json"
"""Path to the token file."""
credentials: Any = None
"""Your own google credentials created via your own mechanism"""
folder_id: Optional[str] = None
"""The folder id to load from."""
document_ids: Optional[List[str]] = None
Expand Down Expand Up @@ -276,6 +278,11 @@ def _load_credentials(self) -> Any:
if self.token_path.exists():
creds = Credentials.from_authorized_user_file(str(self.token_path), SCOPES)

if self.credentials:
# use whatever was passed to us
creds = self.credentials
return creds

if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
Expand Down
2 changes: 1 addition & 1 deletion libs/community/pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "langchain-google-community"
version = "2.0.2"
version = "2.0.3"
description = "An integration package connecting miscellaneous Google products and LangChain"
authors = []
readme = "README.md"
Expand Down
7 changes: 7 additions & 0 deletions libs/genai/langchain_google_genai/_image_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
from typing import Any, Dict
from urllib.parse import urlparse

import filetype # type: ignore[import]
import requests
from google.ai.generativelanguage_v1beta.types import Part

Expand Down Expand Up @@ -87,7 +88,13 @@ def load_part(self, image_string: str) -> Part:
raise ValueError(msg)

inline_data: Dict[str, Any] = {"data": bytes_}

mime_type, _ = mimetypes.guess_type(image_string)
if not mime_type:
kind = filetype.guess(bytes_)
if kind:
mime_type = kind.mime

if mime_type:
inline_data["mime_type"] = mime_type

Expand Down
13 changes: 12 additions & 1 deletion libs/genai/poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion libs/genai/pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "langchain-google-genai"
version = "2.0.5"
version = "2.0.6"
description = "An integration package connecting Google's genai package and LangChain"
authors = []
readme = "README.md"
Expand All @@ -15,6 +15,7 @@ python = ">=3.9,<4.0"
langchain-core = ">=0.3.15,<0.4"
google-generativeai = "^0.8.0"
pydantic = ">=2,<3"
filetype = "^1.2.0"

[tool.poetry.group.test]
optional = true
Expand Down
27 changes: 27 additions & 0 deletions libs/genai/tests/integration_tests/test_chat_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,6 +158,33 @@ def test_chat_google_genai_invoke_multimodal() -> None:
assert len(chunk.content.strip()) > 0


def test_chat_google_genai_invoke_multimodal_by_url() -> None:
    """Invoke and stream a multimodal prompt whose image is given by URL."""
    text_part = {
        "type": "text",
        "text": "Guess what's in this picture! You have 3 guesses.",
    }
    image_part = {
        "type": "image_url",
        "image_url": "https://picsum.photos/seed/picsum/200/300",
    }
    messages: list = [HumanMessage(content=[text_part, image_part])]

    llm = ChatGoogleGenerativeAI(model=_VISION_MODEL)

    result = llm.invoke(messages)
    assert isinstance(result.content, str)
    assert len(result.content.strip()) > 0

    # Streaming over the same prompt should also yield non-empty string chunks.
    for piece in llm.stream(messages):
        print(piece)  # noqa: T201
        assert isinstance(piece.content, str)
        assert len(piece.content.strip()) > 0


def test_chat_google_genai_invoke_multimodal_multiple_messages() -> None:
messages: list = [
HumanMessage(content="Hi there"),
Expand Down
4 changes: 3 additions & 1 deletion libs/vertexai/langchain_google_vertexai/_anthropic_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,9 @@
from pydantic import BaseModel

if TYPE_CHECKING:
from anthropic.types import RawMessageStreamEvent # type: ignore
from anthropic.types import (
RawMessageStreamEvent, # type: ignore[unused-ignore, import-not-found]
)

_message_type_lookups = {
"human": "user",
Expand Down
7 changes: 6 additions & 1 deletion libs/vertexai/langchain_google_vertexai/_image_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import os
import re
from enum import Enum
from functools import cached_property
from typing import Dict, Optional, Union
from urllib.parse import urlparse

Expand Down Expand Up @@ -43,6 +44,10 @@ def __init__(
"""
self._project = project

@cached_property
def _storage_client(self) -> storage.Client:
    # One shared google-cloud-storage client per loader instance.
    # cached_property memoizes the client so repeated blob fetches
    # (e.g. _blob_from_gcs) reuse a single connection pool instead of
    # constructing a new Client on every call.
    return storage.Client(project=self._project)

def load_bytes(self, image_string: str) -> bytes:
"""Routes to the correct loader based on the image_string.
Expand Down Expand Up @@ -198,7 +203,7 @@ def _blob_from_gcs(self, gcs_uri: str) -> storage.Blob:
storage.Blob
"""

gcs_client = storage.Client(project=self._project)
gcs_client = self._storage_client
blob = storage.Blob.from_string(gcs_uri, gcs_client)
blob.reload(client=gcs_client)
return blob
Expand Down
2 changes: 1 addition & 1 deletion libs/vertexai/langchain_google_vertexai/model_garden.py
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ def __init__(self, **kwargs: Any) -> None:

@model_validator(mode="after")
def validate_environment(self) -> Self:
from anthropic import ( # type: ignore
from anthropic import ( # type: ignore[unused-ignore, import-not-found]
AnthropicVertex,
AsyncAnthropicVertex,
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -292,10 +292,15 @@ def mset(self, key_value_pairs: Sequence[Tuple[str, Document]]) -> None:
entities = []
for key, document in zip(keys, documents):
entity = self._client.entity(
key=key, exclude_from_indexes=self.exclude_from_indexes
key=key,
exclude_from_indexes=self.exclude_from_indexes
if self.exclude_from_indexes
else [],
)
metadata_entity = self._client.entity(
exclude_from_indexes=self.exclude_from_indexes
if self.exclude_from_indexes
else []
)
metadata_entity.update(document.metadata)
entity[self._text_property_name] = document.page_content
Expand Down
Loading

0 comments on commit 02e7c2d

Please sign in to comment.