Skip to content

Commit

Permalink
Merge branch 'main' of github.com:moiz-stri/langchain-google
Browse files Browse the repository at this point in the history
  • Loading branch information
moiz-stri committed Mar 15, 2024
2 parents 32682c4 + 08f2a02 commit 78cd85c
Show file tree
Hide file tree
Showing 19 changed files with 1,158 additions and 565 deletions.
11 changes: 7 additions & 4 deletions libs/vertexai/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ llm.invoke("Sing a ballad of LangChain.")
```

You can use other models, e.g. `chat-bison`:

```python
from langchain_google_vertexai import ChatVertexAI

Expand Down Expand Up @@ -58,7 +59,6 @@ The value of `image_url` can be any of the following:
- A local file path
- A base64 encoded image (e.g., `data:image/png;base64,abcd124`)


## Embeddings

You can use Google Cloud's embedding models as:
Expand All @@ -71,24 +71,27 @@ embeddings.embed_query("hello, world!")
```

## LLMs

You can use Google Cloud's generative AI models as LangChain LLMs:

```python
from langchain.prompts import PromptTemplate
from langchain_google_vertexai import VertexAI
from langchain_core.prompts import PromptTemplate
from langchain_google_vertexai import ChatVertexAI

template = """Question: {question}
Answer: Let's think step by step."""
prompt = PromptTemplate.from_template(template)

llm = ChatVertexAI(model_name="gemini-pro")
chain = prompt | llm

question = "Who was the president in the year Justin Beiber was born?"
question = "Who was the president of the USA in 1994?"
print(chain.invoke({"question": question}))
```

You can use Gemini and PaLM models, including code-generation ones:

```python
from langchain_google_vertexai import VertexAI

Expand Down
7 changes: 7 additions & 0 deletions libs/vertexai/langchain_google_vertexai/_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
from langchain_google_vertexai._enums import HarmBlockThreshold, HarmCategory
from langchain_google_vertexai._utils import (
get_client_info,
get_user_agent,
is_codey_model,
is_gemini_model,
)
Expand Down Expand Up @@ -142,6 +143,12 @@ def _default_params(self) -> Dict[str, Any]:
)
return updated_params

@property
def _user_agent(self) -> str:
    """Return the user-agent string identifying this wrapper class and model.

    The identifier combines the concrete class name with ``self.model_name``
    (e.g. ``ChatVertexAI_gemini-pro``) and is formatted by ``get_user_agent``.
    """
    identifier = f"{type(self).__name__}_{self.model_name}"
    return get_user_agent(identifier)[1]

@classmethod
def _init_vertexai(cls, values: Dict) -> None:
vertexai.init(
Expand Down
2 changes: 1 addition & 1 deletion libs/vertexai/langchain_google_vertexai/_image_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from urllib.parse import urlparse

import requests
from google.cloud import storage # type: ignore[attr-defined]
from google.cloud import storage


class ImageBytesLoader:
Expand Down
32 changes: 24 additions & 8 deletions libs/vertexai/langchain_google_vertexai/_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,12 @@
import dataclasses
import re
from importlib import metadata
from typing import Any, Callable, Dict, List, Optional, Union
from typing import Any, Callable, Dict, List, Optional, Tuple, Union

import google.api_core
import proto # type: ignore[import-untyped]
from google.api_core.gapic_v1.client_info import ClientInfo
from google.cloud import storage # type: ignore[attr-defined]
from google.cloud import storage # type: ignore[attr-defined, unused-ignore]
from langchain_core.callbacks import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
Expand Down Expand Up @@ -45,7 +45,7 @@ def create_retry_decorator(
return decorator


def raise_vertex_import_error(minimum_expected_version: str = "1.38.0") -> None:
def raise_vertex_import_error(minimum_expected_version: str = "1.44.0") -> None:
"""Raise ImportError related to Vertex SDK being not available.
Args:
Expand All @@ -59,27 +59,43 @@ def raise_vertex_import_error(minimum_expected_version: str = "1.38.0") -> None:
)


def get_client_info(module: Optional[str] = None) -> "ClientInfo":
def get_user_agent(module: Optional[str] = None) -> Tuple[str, str]:
    r"""Build a custom user-agent pair for outgoing API calls.

    Args:
        module (Optional[str]):
            Optional. A module name appended to the langchain version to
            identify the calling component.

    Returns:
        Tuple[str, str]: ``(client_library_version, user_agent)`` where the
        user agent is ``"langchain/<client_library_version>"``.
    """
    try:
        base_version = metadata.version("langchain")
    except metadata.PackageNotFoundError:
        # langchain may not be installed alongside this package; fall back
        # to a sentinel version rather than failing.
        base_version = "0.0.0"
    if module:
        client_library_version = f"{base_version}-{module}"
    else:
        client_library_version = base_version
    return client_library_version, f"langchain/{client_library_version}"


def get_client_info(module: Optional[str] = None) -> "ClientInfo":
    r"""Returns a client info object with a custom user agent header.

    Args:
        module (Optional[str]):
            Optional. The module for a custom user agent header.

    Returns:
        google.api_core.gapic_v1.client_info.ClientInfo
    """
    version, agent = get_user_agent(module)
    return ClientInfo(client_library_version=version, user_agent=agent)


def load_image_from_gcs(path: str, project: Optional[str] = None) -> Image:
"""Loads im Image from GCS."""
"""Loads an Image from GCS."""
gcs_client = storage.Client(project=project)
pieces = path.split("/")
blobs = list(gcs_client.list_blobs(pieces[2], prefix="/".join(pieces[3:])))
Expand Down
Loading

0 comments on commit 78cd85c

Please sign in to comment.