From 079a5706eb98d42d90530f0f7847fb8fe862fdb1 Mon Sep 17 00:00:00 2001 From: William Fu-Hinthorn <13333726+hinthornw@users.noreply.github.com> Date: Tue, 24 Sep 2024 16:16:26 -0700 Subject: [PATCH 1/5] Add suite-level caching --- .../python-integration-tests/action.yml | 17 ++-- python/langsmith/_internal/_cache_utils.py | 88 +++++++++++++++++++ python/langsmith/_testing.py | 23 ++--- 3 files changed, 109 insertions(+), 19 deletions(-) create mode 100644 python/langsmith/_internal/_cache_utils.py diff --git a/.github/actions/python-integration-tests/action.yml b/.github/actions/python-integration-tests/action.yml index 516af5b86..7b9cef8fa 100644 --- a/.github/actions/python-integration-tests/action.yml +++ b/.github/actions/python-integration-tests/action.yml @@ -46,10 +46,11 @@ runs: LANGCHAIN_ENDPOINT: ${{ inputs.langchain-endpoint }} LANGCHAIN_API_KEY: ${{ inputs.langchain-api-key }} OPENAI_API_KEY: ${{ inputs.openai-api-key }} + LANGSMITH_TEST_CACHE: tests/cassettes run: make integration_tests_fast shell: bash working-directory: python - + - name: Run doctest env: LANGCHAIN_TRACING_V2: "true" @@ -57,19 +58,19 @@ runs: LANGCHAIN_API_KEY: ${{ inputs.langchain-api-key }} OPENAI_API_KEY: ${{ inputs.openai-api-key }} ANTHROPIC_API_KEY: ${{ inputs.anthropic-api-key }} + LANGSMITH_TEST_CACHE: tests/cassettes run: make doctest shell: bash working-directory: python - - name: Run Evaluation env: - LANGCHAIN_TRACING_V2: "true" - LANGCHAIN_ENDPOINT: ${{ inputs.langchain-endpoint }} - LANGCHAIN_API_KEY: ${{ inputs.langchain-api-key }} - OPENAI_API_KEY: ${{ inputs.openai-api-key }} - ANTHROPIC_API_KEY: ${{ inputs.anthropic-api-key }} + LANGCHAIN_TRACING_V2: "true" + LANGCHAIN_ENDPOINT: ${{ inputs.langchain-endpoint }} + LANGCHAIN_API_KEY: ${{ inputs.langchain-api-key }} + OPENAI_API_KEY: ${{ inputs.openai-api-key }} + ANTHROPIC_API_KEY: ${{ inputs.anthropic-api-key }} + LANGSMITH_TEST_CACHE: tests/cassettes run: make evals shell: bash working-directory: python - diff --git a/python/langsmith/_internal/_cache_utils.py b/python/langsmith/_internal/_cache_utils.py new file mode 100644 index 000000000..03abb7c11 --- /dev/null +++ b/python/langsmith/_internal/_cache_utils.py @@ -0,0 +1,88 @@ +"""Provide utilities for managing caching in LangSmith. + +Includes a CacheManager class that handles the lifecycle of caching +operations, allowing for easy setup and teardown of caching contexts. +""" + +import pathlib +from typing import Optional, Sequence, Union + +from langsmith import utils + + +class CacheManager: + """Manage caching operations for LangSmith. + + Provides methods to start and stop caching, and can be used + as a context manager for automatic cache management. + + Attributes: + path (Optional[Union[str, pathlib.Path]]): The path to the cache file. + ignore_hosts (Optional[Sequence[str]]): A list of hosts to ignore in caching. + context_manager: The context manager for caching operations. + context: The context object for the current caching session. + """ + + def __init__( + self, + path: Optional[Union[str, pathlib.Path]], + ignore_hosts: Optional[Sequence[str]] = None, + ): + """Initialize the CacheManager. + + Args: + path: The path to the cache file. + ignore_hosts: A list of hosts to ignore in caching. + """ + self.path = path + self.ignore_hosts = ignore_hosts + self.context_manager = None + self.context = None + + def start_caching(self): + """Start the caching session. + + Returns: + self: The CacheManager instance. + + Raises: + RuntimeError: If caching is already started. 
+ """ + if self.context is not None: + raise RuntimeError("Caching is already started") + + self.context_manager = utils.with_optional_cache(self.path, self.ignore_hosts) + if self.context_manager: + self.context = self.context_manager.__enter__() + return self + + def close(self): + """Close the current caching session. + + Raises: + RuntimeError: If caching is not started. + """ + if self.context_manager is not None: + self.context_manager.__exit__(None, None, None) + if self.context is None: + raise RuntimeError("Caching is not started") + self.context = None + self.context_manager = None + + def __enter__(self): + """Enter the context manager, starting the caching session. + + Returns: + self: The CacheManager instance with caching started. + """ + return self.start_caching() + + def __exit__(self, exc_type, exc_value, traceback): + """Exit the context manager, closing the caching session. + + Args: + exc_type: The type of the exception that caused the context to be exited. + exc_value: The instance of the exception. + traceback: A traceback object encoding the stack trace. + """ + self.close() diff --git a/python/langsmith/_testing.py b/python/langsmith/_testing.py index d4a3305f1..b075ad34e 100644 --- a/python/langsmith/_testing.py +++ b/python/langsmith/_testing.py @@ -21,6 +21,7 @@ from langsmith import run_trees as rt from langsmith import schemas as ls_schemas from langsmith import utils as ls_utils +from langsmith._internal import _cache_utils as cache_utils try: import pytest # type: ignore @@ -365,6 +366,7 @@ def _end_tests( }, ) test_suite.wait() + test_suite.cache_manager.close() VT = TypeVar("VT", bound=Optional[dict]) @@ -387,12 +389,20 @@ def __init__( client: Optional[ls_client.Client], experiment: ls_schemas.TracerSession, dataset: ls_schemas.Dataset, + cache: Optional[str] = None, ): self.client = client or rt.get_cached_client() self._experiment = experiment self._dataset = dataset self._version: Optional[datetime.datetime] = None self._executor = ls_utils.ContextThreadPoolExecutor(max_workers=1) + cache_path = ( + Path(cache) / f"{self._dataset.id}.yaml" if cache["cache"] else None + ) + self.cache_manager = cache_utils.CacheManager( + path=cache_path, ignore_hosts=[self.client.api_url] + ) + self.cache_manager.start_caching() atexit.register(_end_tests, self) @property @@ -536,7 +546,7 @@ def _ensure_example( for k in output_keys: outputs[k] = inputs.pop(k, None) test_suite = _LangSmithTestSuite.from_test( - client, func, langtest_extra.get("test_suite_name") + client, func, langtest_extra.get("test_suite_name"), langtest_extra.get("cache") ) example_id, example_name = _get_id(func, inputs, test_suite.id) example_id = langtest_extra["id"] or example_id @@ -592,11 +602,6 @@ def _test(): except BaseException as e: logger.warning(f"Failed to create feedback for run_id {run_id}: {e}") - cache_path = ( - Path(langtest_extra["cache"]) / f"{test_suite.id}.yaml" - if langtest_extra["cache"] - else None - ) current_context = rh.get_tracing_context() metadata = { **(current_context["metadata"] or {}), @@ -605,11 +610,7 @@ def _test(): "reference_example_id": str(example_id), }, } - with rh.tracing_context( - **{**current_context, "metadata": metadata} - ), ls_utils.with_optional_cache( - cache_path, ignore_hosts=[test_suite.client.api_url] - ): + with rh.tracing_context(**{**current_context, "metadata": metadata}): _test() From 684a77e2fcb6b8201ddc60c1b467da805370f1de Mon Sep 17 00:00:00 2001 From: William Fu-Hinthorn <13333726+hinthornw@users.noreply.github.com> Date: Tue, 5 
Nov 2024 18:42:55 -0800 Subject: [PATCH 2/5] tmp --- python/langsmith/utils.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/python/langsmith/utils.py b/python/langsmith/utils.py index 4be0ce8fd..bf937c05e 100644 --- a/python/langsmith/utils.py +++ b/python/langsmith/utils.py @@ -480,11 +480,15 @@ def get_cache_dir(cache: Optional[str]) -> Optional[str]: return get_env_var("TEST_CACHE", default=None) +_OPEN_CACHES = {} + + @contextlib.contextmanager def with_cache( path: Union[str, pathlib.Path], ignore_hosts: Optional[Sequence[str]] = None ) -> Generator[None, None, None]: """Use a cache for requests.""" + global _OPEN_CACHES try: import vcr # type: ignore[import-untyped] except ImportError: @@ -500,6 +504,19 @@ def _filter_request_headers(request: Any) -> Any: return request cache_dir, cache_file = os.path.split(path) + if _OPEN_CACHES: + print( + f"{len(_OPEN_CACHES)}ALREADY OPEN: {_OPEN_CACHES}", + file=sys.stderr, + flush=True, + ) + else: + + print( + "NEW CACHE", + file=sys.stderr, + flush=True, + ) ls_vcr = vcr.VCR( serializer=( @@ -516,7 +533,13 @@ def _filter_request_headers(request: Any) -> Any: before_record_request=_filter_request_headers, ) with ls_vcr.use_cassette(cache_file): + _OPEN_CACHES.setdefault(str(cache_file), 0) + _OPEN_CACHES[str(cache_file)] += 1 + yield + _OPEN_CACHES[str(cache_file)] -= 1 + if _OPEN_CACHES[str(cache_file)] == 0: + _OPEN_CACHES.pop(str(cache_file), 0) @contextlib.contextmanager From 4fc1d318d6b1cbc84b6de9cddbf37743e12c20fa Mon Sep 17 00:00:00 2001 From: William Fu-Hinthorn <13333726+hinthornw@users.noreply.github.com> Date: Tue, 5 Nov 2024 18:51:38 -0800 Subject: [PATCH 3/5] nonfast --- .github/actions/python-integration-tests/action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/python-integration-tests/action.yml b/.github/actions/python-integration-tests/action.yml index 23f632a54..160d9e47f 100644 --- a/.github/actions/python-integration-tests/action.yml +++ b/.github/actions/python-integration-tests/action.yml @@ -47,7 +47,7 @@ runs: LANGCHAIN_API_KEY: ${{ inputs.langchain-api-key-beta }} OPENAI_API_KEY: ${{ inputs.openai-api-key }} LANGSMITH_TEST_CACHE: tests/cassettes - run: make integration_tests_fast + run: make integration_tests shell: bash working-directory: python From fd7026f983ffc2e7e777fe0e3f603af93ba80e1e Mon Sep 17 00:00:00 2001 From: William Fu-Hinthorn <13333726+hinthornw@users.noreply.github.com> Date: Tue, 5 Nov 2024 19:55:33 -0800 Subject: [PATCH 4/5] No dist --- .../python-integration-tests/action.yml | 20 ++--- python/Makefile | 5 +- python/langsmith/_testing.py | 11 ++- python/langsmith/client.py | 1 + python/langsmith/evaluation/_arunner.py | 19 +++-- python/langsmith/evaluation/_runner.py | 13 ++- python/langsmith/utils.py | 81 +++++++++++-------- 7 files changed, 97 insertions(+), 53 deletions(-) diff --git a/.github/actions/python-integration-tests/action.yml b/.github/actions/python-integration-tests/action.yml index 160d9e47f..f7db167ac 100644 --- a/.github/actions/python-integration-tests/action.yml +++ b/.github/actions/python-integration-tests/action.yml @@ -40,16 +40,16 @@ runs: shell: bash working-directory: python - - name: Run integration tests - env: - LANGCHAIN_TRACING_V2: "true" - LANGCHAIN_ENDPOINT: https://beta.api.smith.langchain.com - LANGCHAIN_API_KEY: ${{ inputs.langchain-api-key-beta }} - OPENAI_API_KEY: ${{ inputs.openai-api-key }} - LANGSMITH_TEST_CACHE: tests/cassettes - run: make integration_tests - shell: bash - 
working-directory: python + # - name: Run integration tests + # env: + # LANGCHAIN_TRACING_V2: "true" + # LANGCHAIN_ENDPOINT: https://beta.api.smith.langchain.com + # LANGCHAIN_API_KEY: ${{ inputs.langchain-api-key-beta }} + # OPENAI_API_KEY: ${{ inputs.openai-api-key }} + # LANGSMITH_TEST_CACHE: tests/cassettes + # run: make integration_tests + # shell: bash + # working-directory: python - name: Run doctest env: diff --git a/python/Makefile b/python/Makefile index f7ca1f502..81ffac3f0 100644 --- a/python/Makefile +++ b/python/Makefile @@ -39,9 +39,12 @@ integration_tests: poetry run python -m pytest -v --durations=10 --cov=langsmith --cov-report=term-missing --cov-report=html --cov-config=.coveragerc tests/integration_tests integration_tests_fast: - poetry run python -m pytest -n auto --durations=10 -v --cov=langsmith --cov-report=term-missing --cov-report=html --cov-config=.coveragerc tests/integration_tests + poetry run python -m pytest -n auto --durations=10 -v --cov=langsmith --cov-report=term-missing --cov-report=html --cov-config=.coveragerc --capture=no tests/integration_tests doctest: + poetry run python -m pytest -vv --capture=no --durations=10 --doctest-modules langsmith/evaluation + +doctest_fast: poetry run python -m pytest -n auto --durations=10 --doctest-modules langsmith evals: diff --git a/python/langsmith/_testing.py b/python/langsmith/_testing.py index 243e0337d..0034f741a 100644 --- a/python/langsmith/_testing.py +++ b/python/langsmith/_testing.py @@ -677,7 +677,16 @@ async def _test(): with rh.tracing_context( **{**current_context, "metadata": metadata} ), ls_utils.with_optional_cache( - cache_path, ignore_hosts=[test_suite.client.api_url] + cache_path, + ignore_hosts=list( + set( + ( + test_suite.client.api_url, + "https://beta.api.smith.langchain.com", + "https://api.smith.langchain.com", + ) + ) + ), ): await _test() diff --git a/python/langsmith/client.py b/python/langsmith/client.py index 09216ba48..7c9968ccd 100644 --- a/python/langsmith/client.py +++ b/python/langsmith/client.py @@ -826,6 +826,7 @@ def request_with_retries( raise ls_utils.LangSmithError( f"Failed to {method} {pathname} in LangSmith API. {emsg}" f"{_context}" + f"{ls_utils._CACHE_HANDLES}" ) from e except to_ignore_ as e: if response is not None: diff --git a/python/langsmith/evaluation/_arunner.py b/python/langsmith/evaluation/_arunner.py index 7c791ac09..e338bbe8b 100644 --- a/python/langsmith/evaluation/_arunner.py +++ b/python/langsmith/evaluation/_arunner.py @@ -170,14 +170,10 @@ async def aevaluate( Evaluating over only a subset of the examples using an async generator: - >>> async def example_generator(): - ... examples = client.list_examples(dataset_name=dataset_name, limit=5) - ... for example in examples: - ... yield example >>> results = asyncio.run( ... aevaluate( ... apredict, - ... data=example_generator(), + ... data=client.list_examples(dataset_name=dataset_name, limit=5), ... evaluators=[accuracy], ... summary_evaluators=[precision], ... 
experiment_prefix="My Subset Experiment", @@ -405,7 +401,18 @@ async def _aevaluate( cache_path = pathlib.Path(cache_dir) / f"{dsid}.yaml" else: cache_path = None - with ls_utils.with_optional_cache(cache_path, ignore_hosts=[client.api_url]): + with ls_utils.with_optional_cache( + cache_path, + ignore_hosts=list( + set( + ( + client.api_url, + "https://beta.api.smith.langchain.com", + "https://api.smith.langchain.com", + ) + ) + ), + ): if is_async_target: manager = await manager.awith_predictions( cast(ATARGET_T, target), max_concurrency=max_concurrency diff --git a/python/langsmith/evaluation/_runner.py b/python/langsmith/evaluation/_runner.py index 111986b76..8734b9f00 100644 --- a/python/langsmith/evaluation/_runner.py +++ b/python/langsmith/evaluation/_runner.py @@ -921,7 +921,18 @@ def _evaluate( cache_path = ( pathlib.Path(cache_dir) / f"{manager.dataset_id}.yaml" if cache_dir else None ) - with ls_utils.with_optional_cache(cache_path, ignore_hosts=[client.api_url]): + with ls_utils.with_optional_cache( + cache_path, + ignore_hosts=list( + set( + ( + client.api_url, + "https://beta.api.smith.langchain.com", + "https://api.smith.langchain.com", + ) + ) + ), + ): if _is_callable(target): # Add predictions to the experiment. manager = manager.with_predictions( diff --git a/python/langsmith/utils.py b/python/langsmith/utils.py index bf937c05e..ee1edce50 100644 --- a/python/langsmith/utils.py +++ b/python/langsmith/utils.py @@ -480,7 +480,8 @@ def get_cache_dir(cache: Optional[str]) -> Optional[str]: return get_env_var("TEST_CACHE", default=None) -_OPEN_CACHES = {} +_CACHE_HANDLES = {} +_CACHE_LOCK = threading.RLock() @contextlib.contextmanager @@ -488,7 +489,11 @@ def with_cache( path: Union[str, pathlib.Path], ignore_hosts: Optional[Sequence[str]] = None ) -> Generator[None, None, None]: """Use a cache for requests.""" - global _OPEN_CACHES + print( + f"FOO TRYING FOR CACHE : {path} {ignore_hosts}", + flush=True, + file=sys.stderr, + ) try: import vcr # type: ignore[import-untyped] except ImportError: @@ -498,48 +503,56 @@ def with_cache( ) def _filter_request_headers(request: Any) -> Any: + print(f"Request: {repr(request)}: {ignore_hosts}", file=sys.stderr, flush=True) if ignore_hosts and any(request.url.startswith(host) for host in ignore_hosts): + print(f"Ignoring URL: {request.url}", file=sys.stderr, flush=True) return None request.headers = {} return request cache_dir, cache_file = os.path.split(path) - if _OPEN_CACHES: - print( - f"{len(_OPEN_CACHES)}ALREADY OPEN: {_OPEN_CACHES}", - file=sys.stderr, - flush=True, - ) - else: - print( - "NEW CACHE", - file=sys.stderr, - flush=True, - ) + with _CACHE_LOCK: + if path not in _CACHE_HANDLES: + ls_vcr = vcr.VCR( + serializer=( + "yaml" + if cache_file.endswith(".yaml") or cache_file.endswith(".yml") + else "json" + ), + cassette_library_dir=cache_dir, + record_mode="new_episodes", + match_on=["uri", "method", "path", "body"], + filter_headers=["authorization", "Set-Cookie"], + before_record_request=_filter_request_headers, + ) - ls_vcr = vcr.VCR( - serializer=( - "yaml" - if cache_file.endswith(".yaml") or cache_file.endswith(".yml") - else "json" - ), - cassette_library_dir=cache_dir, - # Replay previous requests, record new ones - # TODO: Support other modes - record_mode="new_episodes", - match_on=["uri", "method", "path", "body"], - filter_headers=["authorization", "Set-Cookie"], - before_record_request=_filter_request_headers, - ) - with ls_vcr.use_cassette(cache_file): - _OPEN_CACHES.setdefault(str(cache_file), 0) - 
_OPEN_CACHES[str(cache_file)] += 1 + cassette = ls_vcr.use_cassette(cache_file) + _CACHE_HANDLES[path] = (1, cassette) + print(f"FOO ENTERING CASSETTE: {path}", file=sys.stderr, flush=True) + cassette.__enter__() + else: + existing, handle = _CACHE_HANDLES[path] + print(f"FOO ALREADY MADE: {existing} - {path}", file=sys.stderr, flush=True) + _CACHE_HANDLES[path] = (existing + 1, handle) + try: yield - _OPEN_CACHES[str(cache_file)] -= 1 - if _OPEN_CACHES[str(cache_file)] == 0: - _OPEN_CACHES.pop(str(cache_file), 0) + finally: + with _CACHE_LOCK: + count, handle = _CACHE_HANDLES[path] + count -= 1 + if count == 0: + print(f"FOO EXITING HANDLE: {path}", file=sys.stderr, flush=True) + handle.__exit__(None, None, None) + del _CACHE_HANDLES[path] + else: + print( + f"FOO DECREMENTING COUNT: {count} - {path}", + file=sys.stderr, + flush=True, + ) + _CACHE_HANDLES[path] = (count, handle) @contextlib.contextmanager From 201b7c95cb22e1002ce4fb083229f4aa279183f8 Mon Sep 17 00:00:00 2001 From: William Fu-Hinthorn <13333726+hinthornw@users.noreply.github.com> Date: Wed, 6 Nov 2024 00:16:08 -0800 Subject: [PATCH 5/5] Stupid --- .../python-integration-tests/action.yml | 1 + python/langsmith/client.py | 6 +- python/langsmith/utils.py | 82 +++++++++++++++---- 3 files changed, 73 insertions(+), 16 deletions(-) diff --git a/.github/actions/python-integration-tests/action.yml b/.github/actions/python-integration-tests/action.yml index f7db167ac..bb39ee917 100644 --- a/.github/actions/python-integration-tests/action.yml +++ b/.github/actions/python-integration-tests/action.yml @@ -37,6 +37,7 @@ runs: run: | poetry install --with dev poetry run pip install -U langchain langchain_anthropic tiktoken rapidfuzz vcrpy numpy + poetry run pip install "urllib3>=2" shell: bash working-directory: python diff --git a/python/langsmith/client.py b/python/langsmith/client.py index 7c9968ccd..aa234b834 100644 --- a/python/langsmith/client.py +++ b/python/langsmith/client.py @@ -708,7 +708,9 @@ def request_with_retries( for idx in range(stop_after_attempt): try: try: - with ls_utils.filter_logs(_urllib3_logger, logging_filters): + with ls_utils.filter_logs( + _urllib3_logger, logging_filters + ), ls_utils._ensure_orig(): response = self.session.request( method, ( @@ -721,6 +723,8 @@ def request_with_retries( ) ls_utils.raise_for_status_with_text(response) return response + # except TypeError: + # continue except requests.exceptions.ReadTimeout as e: logger.debug("Passing on exception %s", e) if idx + 1 == stop_after_attempt: diff --git a/python/langsmith/utils.py b/python/langsmith/utils.py index ee1edce50..37f647a91 100644 --- a/python/langsmith/utils.py +++ b/python/langsmith/utils.py @@ -42,6 +42,8 @@ from langsmith import schemas as ls_schemas _LOGGER = logging.getLogger(__name__) +vcr_log = logging.getLogger("vcr") +vcr_log.setLevel(logging.DEBUG) class LangSmithError(Exception): @@ -483,17 +485,77 @@ def get_cache_dir(cache: Optional[str]) -> Optional[str]: _CACHE_HANDLES = {} _CACHE_LOCK = threading.RLock() +import contextlib +from unittest import mock + +cpool = None +conn = None + +# Try to save the original types for urllib3 +try: + import urllib3.connection as conn + import urllib3.connectionpool as cpool +except ImportError: # pragma: no cover + pass +else: + _VerifiedHTTPSConnection = getattr(conn, "VerifiedHTTPSConnection", None) + _connHTTPConnection = conn.HTTPConnection + _connHTTPSConnection = conn.HTTPSConnection + + +@contextlib.contextmanager +def _ensure_orig(): + if conn is None or cpool is None: + # If 
urllib3 is not imported, just yield without doing anything + yield + return + + original_connections = { + "HTTPConnection": conn.HTTPConnection, + "HTTPSConnection": conn.HTTPSConnection, + } + if _VerifiedHTTPSConnection: + original_connections["VerifiedHTTPSConnection"] = conn.VerifiedHTTPSConnection + + original_pool_connections = {} + if hasattr(cpool, "HTTPConnectionPool"): + original_pool_connections["HTTPConnectionPool"] = ( + cpool.HTTPConnectionPool.ConnectionCls + ) + if hasattr(cpool, "HTTPSConnectionPool"): + original_pool_connections["HTTPSConnectionPool"] = ( + cpool.HTTPSConnectionPool.ConnectionCls + ) + + try: + # Temporarily replace connection classes + conn.HTTPConnection = _connHTTPConnection + conn.HTTPSConnection = _connHTTPSConnection + if _VerifiedHTTPSConnection: + conn.VerifiedHTTPSConnection = _VerifiedHTTPSConnection + + # Temporarily replace connection pool classes if they exist + if "HTTPConnectionPool" in original_pool_connections: + cpool.HTTPConnectionPool.ConnectionCls = _connHTTPConnection + if "HTTPSConnectionPool" in original_pool_connections: + cpool.HTTPSConnectionPool.ConnectionCls = _connHTTPSConnection + + yield + finally: + # Restore original connection classes + for attr, original in original_connections.items(): + setattr(conn, attr, original) + + # Restore original connection pool classes + for attr, original in original_pool_connections.items(): + getattr(cpool, attr).ConnectionCls = original + @contextlib.contextmanager def with_cache( path: Union[str, pathlib.Path], ignore_hosts: Optional[Sequence[str]] = None ) -> Generator[None, None, None]: """Use a cache for requests.""" - print( - f"FOO TRYING FOR CACHE : {path} {ignore_hosts}", - flush=True, - file=sys.stderr, - ) try: import vcr # type: ignore[import-untyped] except ImportError: @@ -503,9 +565,7 @@ def with_cache( ) def _filter_request_headers(request: Any) -> Any: - print(f"Request: {repr(request)}: {ignore_hosts}", file=sys.stderr, flush=True) if ignore_hosts and any(request.url.startswith(host) for host in ignore_hosts): - print(f"Ignoring URL: {request.url}", file=sys.stderr, flush=True) return None request.headers = {} return request @@ -529,11 +589,9 @@ def _filter_request_headers(request: Any) -> Any: cassette = ls_vcr.use_cassette(cache_file) _CACHE_HANDLES[path] = (1, cassette) - print(f"FOO ENTERING CASSETTE: {path}", file=sys.stderr, flush=True) cassette.__enter__() else: existing, handle = _CACHE_HANDLES[path] - print(f"FOO ALREADY MADE: {existing} - {path}", file=sys.stderr, flush=True) _CACHE_HANDLES[path] = (existing + 1, handle) try: @@ -543,15 +601,9 @@ def _filter_request_headers(request: Any) -> Any: count, handle = _CACHE_HANDLES[path] count -= 1 if count == 0: - print(f"FOO EXITING HANDLE: {path}", file=sys.stderr, flush=True) handle.__exit__(None, None, None) del _CACHE_HANDLES[path] else: - print( - f"FOO DECREMENTING COUNT: {count} - {path}", - file=sys.stderr, - flush=True, - ) _CACHE_HANDLES[path] = (count, handle)
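
A minimal usage sketch of the nesting behavior the reference-counted cassette handles in PATCH 5/5 aim for, assuming `vcrpy` is installed and `langsmith.utils.with_cache` keeps the `with_cache(path, ignore_hosts=...)` signature shown in the diff; the cassette path is hypothetical:

    import pathlib

    from langsmith import utils as ls_utils

    # Hypothetical cassette path; a .yaml/.yml suffix selects the YAML serializer.
    cache_path = pathlib.Path("tests/cassettes/example.yaml")
    ignore = ["https://api.smith.langchain.com", "https://beta.api.smith.langchain.com"]

    with ls_utils.with_cache(cache_path, ignore_hosts=ignore):
        # Outermost entry for this path: one VCR cassette is opened and registered
        # in _CACHE_HANDLES with a count of 1.
        with ls_utils.with_cache(cache_path, ignore_hosts=ignore):
            # Re-entry for the same path: the existing cassette handle is reused and
            # its count bumped to 2; no second cassette is opened for this file.
            ...  # issue requests here; requests to the ignored hosts are not recorded
        # Leaving the inner scope only decrements the count back to 1.
    # Leaving the outer scope drops the count to 0, exits the cassette, and removes
    # the handle, so the recorded interactions are written to the cassette file.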