Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add the BQ job usage tracking from LangChain #17123

Merged
merged 16 commits into from
Feb 13, 2024
60 changes: 60 additions & 0 deletions libs/community/langchain_community/utils/google.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
"""Utilities to use Google provided components."""

from importlib import metadata
from typing import Any, Callable, Optional, Union

from langchain_core.callbacks import (
AsyncCallbackManagerForLLMRun,
CallbackManagerForLLMRun,
)
from langchain_core.language_models.llms import create_base_retry_decorator


def create_retry_decorator(
    *,
    max_retries: int = 1,
    run_manager: Optional[
        Union[AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun]
    ] = None,
) -> Callable[[Any], Any]:
    """Create a retry decorator for Vertex / PaLM LLM calls.

    Args:
        max_retries: Maximum number of retry attempts. Defaults to 1.
        run_manager: Optional callback manager that is notified about
            each retry attempt.

    Returns:
        A decorator that retries the wrapped callable on the Google API
        errors listed below.
    """
    # Imported lazily so importing this module does not require
    # google-api-core unless the decorator is actually used.
    from google.api_core.exceptions import (
        Aborted,
        DeadlineExceeded,
        GoogleAPIError,
        ResourceExhausted,
        ServiceUnavailable,
    )

    # NOTE(review): GoogleAPIError is the base class of the four errors
    # above, so including it retries on *any* API error, not just the
    # transient ones — confirm this breadth is intended.
    errors = [
        ResourceExhausted,
        ServiceUnavailable,
        Aborted,
        DeadlineExceeded,
        GoogleAPIError,
    ]
    decorator = create_base_retry_decorator(
        error_types=errors, max_retries=max_retries, run_manager=run_manager
    )
    return decorator


def get_client_info(module: Optional[str] = None) -> Any:
    r"""Returns a custom user agent header.

    Args:
        module (Optional[str]):
            Optional. The module for a custom user agent header.
    Returns:
        google.api_core.gapic_v1.client_info.ClientInfo
    """
    from google.api_core.gapic_v1.client_info import ClientInfo

    # The reported client version is the installed langchain version,
    # optionally suffixed with the calling module's name.
    base_version = metadata.version("langchain")
    if module:
        client_library_version = f"{base_version}-{module}"
    else:
        client_library_version = base_version

    return ClientInfo(
        client_library_version=client_library_version,
        user_agent=f"langchain/{client_library_version}",
    )
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from langchain_core.embeddings import Embeddings
from langchain_core.vectorstores import VectorStore

from langchain_community.utils.google import get_client_info
from langchain_community.vectorstores.utils import (
DistanceStrategy,
maximal_marginal_relevance,
Expand All @@ -30,7 +31,6 @@

_MIN_INDEX_ROWS = 5000 # minimal number of rows for creating an index
_INDEX_CHECK_PERIOD_SECONDS = 60  # Do not check for index more often than this.

_vector_table_lock = Lock() # process-wide BigQueryVectorSearch table lock


Expand Down Expand Up @@ -90,8 +90,12 @@ def __init__(
try:
from google.cloud import bigquery

ashleyxuu marked this conversation as resolved.
Show resolved Hide resolved
client_info = get_client_info(module="bigquery-vector-search")
self.bq_client = bigquery.Client(
project=project_id, location=location, credentials=credentials
project=project_id,
location=location,
credentials=credentials,
client_info=client_info,
)
except ModuleNotFoundError:
raise ImportError(
Expand Down
Loading