Skip to content

Commit

Permalink
Stupid
Browse files Browse the repository at this point in the history
  • Loading branch information
hinthornw committed Nov 6, 2024
1 parent fd7026f commit 201b7c9
Show file tree
Hide file tree
Showing 3 changed files with 73 additions and 16 deletions.
1 change: 1 addition & 0 deletions .github/actions/python-integration-tests/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ runs:
run: |
poetry install --with dev
poetry run pip install -U langchain langchain_anthropic tiktoken rapidfuzz vcrpy numpy
poetry run pip install "urllib3>=2"
shell: bash
working-directory: python

Expand Down
6 changes: 5 additions & 1 deletion python/langsmith/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -708,7 +708,9 @@ def request_with_retries(
for idx in range(stop_after_attempt):
try:
try:
with ls_utils.filter_logs(_urllib3_logger, logging_filters):
with ls_utils.filter_logs(
_urllib3_logger, logging_filters
), ls_utils._ensure_orig():
response = self.session.request(
method,
(
Expand All @@ -721,6 +723,8 @@ def request_with_retries(
)
ls_utils.raise_for_status_with_text(response)
return response
# except TypeError:
# continue
except requests.exceptions.ReadTimeout as e:
logger.debug("Passing on exception %s", e)
if idx + 1 == stop_after_attempt:
Expand Down
82 changes: 67 additions & 15 deletions python/langsmith/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,8 @@
from langsmith import schemas as ls_schemas

_LOGGER = logging.getLogger(__name__)
# NOTE(review): forcing the third-party "vcr" logger to DEBUG at import time
# looks like a leftover debugging aid from this commit — consider removing it
# (or gating it behind an env var) before release, as it is a module-import
# side effect that changes logging for every consumer of this module.
vcr_log = logging.getLogger("vcr")
vcr_log.setLevel(logging.DEBUG)


class LangSmithError(Exception):
Expand Down Expand Up @@ -483,17 +485,77 @@ def get_cache_dir(cache: Optional[str]) -> Optional[str]:
_CACHE_HANDLES = {}
_CACHE_LOCK = threading.RLock()

# NOTE(review): mid-module imports — consider moving these to the top of the
# file with the other imports. `mock` appears unused in this section; confirm
# it is needed elsewhere in the file before relying on it.
import contextlib
from unittest import mock

# Handles to urllib3's connection modules. Both stay None when urllib3 is not
# installed; _ensure_orig checks for None before attempting any patching.
cpool = None
conn = None

# Try to save the original types for urllib3
try:
    import urllib3.connection as conn
    import urllib3.connectionpool as cpool
except ImportError:  # pragma: no cover
    pass
else:
    # Snapshot the pristine connection classes at import time, before any
    # third party (presumably vcrpy, used below for request caching — verify)
    # monkeypatches them. VerifiedHTTPSConnection may be absent in newer
    # urllib3 releases, hence the getattr default of None.
    _VerifiedHTTPSConnection = getattr(conn, "VerifiedHTTPSConnection", None)
    _connHTTPConnection = conn.HTTPConnection
    _connHTTPSConnection = conn.HTTPSConnection


@contextlib.contextmanager
def _ensure_orig():
    """Temporarily reinstall urllib3's pristine connection classes.

    While the context is active, ``urllib3.connection`` and the connection
    pools use the classes snapshotted at import time (``_connHTTPConnection``
    et al.), shielding the wrapped request from any monkeypatching applied
    after this module was imported. On exit, whatever classes were installed
    beforehand (possibly patched ones) are put back.

    Yields:
        None. If urllib3 was never imported (``conn``/``cpool`` are None),
        the context is a no-op.
    """
    if conn is None or cpool is None:
        # urllib3 is unavailable; there is nothing to patch.
        yield
        return

    # Snapshot whatever classes are currently installed so they can be
    # restored afterwards — they may be third-party replacements.
    saved_conn_classes = {
        "HTTPConnection": conn.HTTPConnection,
        "HTTPSConnection": conn.HTTPSConnection,
    }
    if _VerifiedHTTPSConnection:
        saved_conn_classes["VerifiedHTTPSConnection"] = conn.VerifiedHTTPSConnection

    # Map each pool class (when present) to its current ConnectionCls, and
    # remember which pristine class should replace it during the context.
    pristine_pool_classes = {
        "HTTPConnectionPool": _connHTTPConnection,
        "HTTPSConnectionPool": _connHTTPSConnection,
    }
    saved_pool_classes = {}
    for pool_name in ("HTTPConnectionPool", "HTTPSConnectionPool"):
        if hasattr(cpool, pool_name):
            saved_pool_classes[pool_name] = getattr(cpool, pool_name).ConnectionCls

    try:
        # Install the pristine classes for the duration of the context.
        conn.HTTPConnection = _connHTTPConnection
        conn.HTTPSConnection = _connHTTPSConnection
        if _VerifiedHTTPSConnection:
            conn.VerifiedHTTPSConnection = _VerifiedHTTPSConnection
        for pool_name in saved_pool_classes:
            getattr(cpool, pool_name).ConnectionCls = pristine_pool_classes[pool_name]

        yield
    finally:
        # Restore exactly what was installed before entering the context.
        for attr_name, klass in saved_conn_classes.items():
            setattr(conn, attr_name, klass)
        for pool_name, klass in saved_pool_classes.items():
            getattr(cpool, pool_name).ConnectionCls = klass


@contextlib.contextmanager
def with_cache(
path: Union[str, pathlib.Path], ignore_hosts: Optional[Sequence[str]] = None
) -> Generator[None, None, None]:
"""Use a cache for requests."""
print(
f"FOO TRYING FOR CACHE : {path} {ignore_hosts}",
flush=True,
file=sys.stderr,
)
try:
import vcr # type: ignore[import-untyped]
except ImportError:
Expand All @@ -503,9 +565,7 @@ def with_cache(
)

def _filter_request_headers(request: Any) -> Any:
print(f"Request: {repr(request)}: {ignore_hosts}", file=sys.stderr, flush=True)
if ignore_hosts and any(request.url.startswith(host) for host in ignore_hosts):
print(f"Ignoring URL: {request.url}", file=sys.stderr, flush=True)
return None
request.headers = {}
return request
Expand All @@ -529,11 +589,9 @@ def _filter_request_headers(request: Any) -> Any:

cassette = ls_vcr.use_cassette(cache_file)
_CACHE_HANDLES[path] = (1, cassette)
print(f"FOO ENTERING CASSETTE: {path}", file=sys.stderr, flush=True)
cassette.__enter__()
else:
existing, handle = _CACHE_HANDLES[path]
print(f"FOO ALREADY MADE: {existing} - {path}", file=sys.stderr, flush=True)
_CACHE_HANDLES[path] = (existing + 1, handle)

try:
Expand All @@ -543,15 +601,9 @@ def _filter_request_headers(request: Any) -> Any:
count, handle = _CACHE_HANDLES[path]
count -= 1
if count == 0:
print(f"FOO EXITING HANDLE: {path}", file=sys.stderr, flush=True)
handle.__exit__(None, None, None)
del _CACHE_HANDLES[path]
else:
print(
f"FOO DECREMENTING COUNT: {count} - {path}",
file=sys.stderr,
flush=True,
)
_CACHE_HANDLES[path] = (count, handle)


Expand Down

0 comments on commit 201b7c9

Please sign in to comment.