diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 484ebd1f38313..d90322e897799 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -3,43 +3,4 @@ Hi there! Thank you for even being interested in contributing to LangChain. As an open-source project in a rapidly developing field, we are extremely open to contributions, whether they involve new features, improved infrastructure, better documentation, or bug fixes. -To learn about how to contribute, please follow the [guides here](https://python.langchain.com/docs/contributing/) - -## ๐Ÿ—บ๏ธ Guidelines - -### ๐Ÿ‘ฉโ€๐Ÿ’ป Ways to contribute - -There are many ways to contribute to LangChain. Here are some common ways people contribute: - -- [**Documentation**](https://python.langchain.com/docs/contributing/documentation): Help improve our docs, including this one! -- [**Code**](https://python.langchain.com/docs/contributing/code): Help us write code, fix bugs, or improve our infrastructure. -- [**Integrations**](https://python.langchain.com/docs/contributing/integrations): Help us integrate with your favorite vendors and tools. - -### ๐ŸšฉGitHub Issues - -Our [issues](https://github.com/langchain-ai/langchain/issues) page is kept up to date with bugs, improvements, and feature requests. - -There is a taxonomy of labels to help with sorting and discovery of issues of interest. Please use these to help organize issues. - -If you start working on an issue, please assign it to yourself. - -If you are adding an issue, please try to keep it focused on a single, modular bug/improvement/feature. -If two issues are related, or blocking, please link them rather than combining them. - -We will try to keep these issues as up-to-date as possible, though -with the rapid rate of development in this field some may get out of date. -If you notice this happening, please let us know. - -### ๐Ÿ™‹Getting Help - -Our goal is to have the simplest developer setup possible. Should you experience any difficulty getting setup, please -contact a maintainer! Not only do we want to help get you unblocked, but we also want to make sure that the process is -smooth for future contributors. - -In a similar vein, we do enforce certain linting, formatting, and documentation standards in the codebase. -If you are finding these difficult (or even just annoying) to work with, feel free to contact a maintainer for help - -we do not want these to get in the way of getting good code into the codebase. - -### Contributor Documentation - -To learn about how to contribute, please follow the [guides here](https://python.langchain.com/docs/contributing/) +To learn how to contribute to LangChain, please follow the [contribution guide here](https://python.langchain.com/docs/contributing/). \ No newline at end of file diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 0e85fdf17da96..4d86dac6a59cf 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,20 +1,24 @@ - +Additional guidelines: +- Make sure optional dependencies are imported within a function. +- Please do not add dependencies to pyproject.toml files (even optional ones) unless they are required for unit tests. +- Most PRs should not touch more than one package. +- Changes should be backwards compatible. +- If you are adding something to community, do not re-import it in langchain. + +If no one reviews your PR within a few days, please @-mention one of baskaryan, efriis, eyurtsev, hwchase17. 
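For contributors unsure what "optional dependencies are imported within a function" looks like in practice, here is a minimal, hypothetical sketch (the package name `some_optional_sdk` and the helper function are placeholders for illustration only, not part of any LangChain API):

```python
# Minimal sketch of the "import optional dependencies inside the function" guideline.
# `some_optional_sdk` is a hypothetical package used purely for illustration.


def fetch_records(endpoint: str) -> list:
    """Fetch records via an optional third-party SDK, imported lazily."""
    try:
        # The import happens at call time, so this module imports cleanly
        # even when the optional package is not installed.
        import some_optional_sdk
    except ImportError:
        raise ImportError(
            "Could not import some_optional_sdk. "
            "Please install it with `pip install some_optional_sdk`."
        )
    client = some_optional_sdk.Client(endpoint)
    return client.list_records()
```

This pattern keeps `pyproject.toml` free of extra dependencies while still giving users a clear install hint at the point where the optional feature is actually used.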
diff --git a/.github/workflows/_integration_test.yml b/.github/workflows/_integration_test.yml index 7e4afe70e2bfa..1189907e96695 100644 --- a/.github/workflows/_integration_test.yml +++ b/.github/workflows/_integration_test.yml @@ -52,6 +52,7 @@ jobs: - name: Run integration tests shell: bash env: + AI21_API_KEY: ${{ secrets.AI21_API_KEY }} GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }} ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }} @@ -62,8 +63,13 @@ jobs: GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }} EXA_API_KEY: ${{ secrets.EXA_API_KEY }} NOMIC_API_KEY: ${{ secrets.NOMIC_API_KEY }} + WATSONX_APIKEY: ${{ secrets.WATSONX_APIKEY }} + WATSONX_PROJECT_ID: ${{ secrets.WATSONX_PROJECT_ID }} PINECONE_API_KEY: ${{ secrets.PINECONE_API_KEY }} PINECONE_ENVIRONMENT: ${{ secrets.PINECONE_ENVIRONMENT }} + ASTRA_DB_API_ENDPOINT: ${{ secrets.ASTRA_DB_API_ENDPOINT }} + ASTRA_DB_APPLICATION_TOKEN: ${{ secrets.ASTRA_DB_APPLICATION_TOKEN }} + ASTRA_DB_KEYSPACE: ${{ secrets.ASTRA_DB_KEYSPACE }} run: | make integration_tests diff --git a/.github/workflows/_release.yml b/.github/workflows/_release.yml index a0b31c866497b..1221d6a6d6ae8 100644 --- a/.github/workflows/_release.yml +++ b/.github/workflows/_release.yml @@ -166,18 +166,30 @@ jobs: - name: Run integration tests if: ${{ startsWith(inputs.working-directory, 'libs/partners/') }} env: + AI21_API_KEY: ${{ secrets.AI21_API_KEY }} GOOGLE_API_KEY: ${{ secrets.GOOGLE_API_KEY }} ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }} TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }} OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + AZURE_OPENAI_API_VERSION: ${{ secrets.AZURE_OPENAI_API_VERSION }} + AZURE_OPENAI_API_BASE: ${{ secrets.AZURE_OPENAI_API_BASE }} + AZURE_OPENAI_API_KEY: ${{ secrets.AZURE_OPENAI_API_KEY }} + AZURE_OPENAI_CHAT_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_CHAT_DEPLOYMENT_NAME }} + AZURE_OPENAI_LLM_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_LLM_DEPLOYMENT_NAME }} + AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME: ${{ secrets.AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT_NAME }} NVIDIA_API_KEY: ${{ secrets.NVIDIA_API_KEY }} GOOGLE_SEARCH_API_KEY: ${{ secrets.GOOGLE_SEARCH_API_KEY }} GOOGLE_CSE_ID: ${{ secrets.GOOGLE_CSE_ID }} EXA_API_KEY: ${{ secrets.EXA_API_KEY }} NOMIC_API_KEY: ${{ secrets.NOMIC_API_KEY }} + WATSONX_APIKEY: ${{ secrets.WATSONX_APIKEY }} + WATSONX_PROJECT_ID: ${{ secrets.WATSONX_PROJECT_ID }} PINECONE_API_KEY: ${{ secrets.PINECONE_API_KEY }} PINECONE_ENVIRONMENT: ${{ secrets.PINECONE_ENVIRONMENT }} + ASTRA_DB_API_ENDPOINT: ${{ secrets.ASTRA_DB_API_ENDPOINT }} + ASTRA_DB_APPLICATION_TOKEN: ${{ secrets.ASTRA_DB_APPLICATION_TOKEN }} + ASTRA_DB_KEYSPACE: ${{ secrets.ASTRA_DB_KEYSPACE }} run: make integration_tests working-directory: ${{ inputs.working-directory }} diff --git a/.github/workflows/api_doc_build.yml b/.github/workflows/api_doc_build.yml new file mode 100644 index 0000000000000..7f88768c56a62 --- /dev/null +++ b/.github/workflows/api_doc_build.yml @@ -0,0 +1,52 @@ +name: API docs build + +on: + workflow_dispatch: + schedule: + - cron: '0 13 * * *' +env: + POETRY_VERSION: "1.7.1" + PYTHON_VERSION: "3.10" + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + ref: bagatur/api_docs_build + + - name: Set Git config + run: | + git config --local user.email "actions@github.com" + git config --local user.name "Github Actions" + + - name: Merge master + run: | + git fetch origin master + git merge origin/master 
-m "Merge master" --allow-unrelated-histories -X theirs + + - name: Set up Python ${{ env.PYTHON_VERSION }} + Poetry ${{ env.POETRY_VERSION }} + uses: "./.github/actions/poetry_setup" + with: + python-version: ${{ env.PYTHON_VERSION }} + poetry-version: ${{ env.POETRY_VERSION }} + cache-key: api-docs + + - name: Install dependencies + run: | + poetry run python -m pip install --upgrade --no-cache-dir pip setuptools + poetry run python -m pip install --upgrade --no-cache-dir sphinx readthedocs-sphinx-ext + poetry run python -m pip install ./libs/partners/* + poetry run python -m pip install --exists-action=w --no-cache-dir -r docs/api_reference/requirements.txt + + - name: Build docs + run: | + poetry run python -m pip install --upgrade --no-cache-dir pip setuptools + poetry run python docs/api_reference/create_api_rst.py + poetry run python -m sphinx -T -E -b html -d _build/doctrees -c docs/api_reference docs/api_reference api_reference_build/html -j auto + + # https://github.com/marketplace/actions/add-commit + - uses: EndBug/add-and-commit@v9 + with: + message: 'Update API docs build' diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml index a5a63b996de86..738e0643346b8 100644 --- a/.github/workflows/codespell.yml +++ b/.github/workflows/codespell.yml @@ -34,3 +34,4 @@ jobs: with: skip: guide_imports.json ignore_words_list: ${{ steps.extract_ignore_words.outputs.ignore_words_list }} + exclude_file: libs/community/langchain_community/llms/yuan2.py diff --git a/.readthedocs.yaml b/.readthedocs.yaml index aad4d18ea4c0e..ca809231d9361 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -13,15 +13,8 @@ build: tools: python: "3.11" commands: - - python -m virtualenv $READTHEDOCS_VIRTUALENV_PATH - - python -m pip install --upgrade --no-cache-dir pip setuptools - - python -m pip install --upgrade --no-cache-dir sphinx readthedocs-sphinx-ext - - python -m pip install ./libs/partners/* - - python -m pip install --exists-action=w --no-cache-dir -r docs/api_reference/requirements.txt - - python docs/api_reference/create_api_rst.py - - cat docs/api_reference/conf.py - - python -m sphinx -T -E -b html -d _build/doctrees -c docs/api_reference docs/api_reference $READTHEDOCS_OUTPUT/html -j auto - + - mkdir -p $READTHEDOCS_OUTPUT + - cp -r api_reference_build/* $READTHEDOCS_OUTPUT # Build documentation in the docs/ directory with Sphinx sphinx: configuration: docs/api_reference/conf.py diff --git a/Makefile b/Makefile index c21d230ab474b..db50dfeed3283 100644 --- a/Makefile +++ b/Makefile @@ -15,7 +15,12 @@ docs_build: docs/.local_build.sh docs_clean: - rm -r _dist + @if [ -d _dist ]; then \ + rm -r _dist; \ + echo "Directory _dist has been cleaned."; \ + else \ + echo "Nothing to clean."; \ + fi docs_linkcheck: poetry run linkchecker _dist/docs/ --ignore-url node_modules diff --git a/README.md b/README.md index a9490035ac828..65985434439d5 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ Looking for the JS/TS library? Check out [LangChain.js](https://github.com/langc To help you ship LangChain apps to production faster, check out [LangSmith](https://smith.langchain.com). [LangSmith](https://smith.langchain.com) is a unified developer platform for building, testing, and monitoring LLM applications. -Fill out [this form](https://airtable.com/appwQzlErAS2qiP0L/shrGtGaVBVAz7NcV2) to get off the waitlist or speak with our sales team. +Fill out [this form](https://www.langchain.com/contact-sales) to speak with our sales team. 
## Quick Install diff --git a/cookbook/amazon_personalize_how_to.ipynb b/cookbook/amazon_personalize_how_to.ipynb new file mode 100644 index 0000000000000..7555e39d89494 --- /dev/null +++ b/cookbook/amazon_personalize_how_to.ipynb @@ -0,0 +1,284 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Amazon Personalize\n", + "\n", + "[Amazon Personalize](https://docs.aws.amazon.com/personalize/latest/dg/what-is-personalize.html) is a fully managed machine learning service that uses your data to generate item recommendations for your users. It can also generate user segments based on the users' affinity for certain items or item metadata.\n", + "\n", + "This notebook goes through how to use the Amazon Personalize Chain. You need an Amazon Personalize campaign_arn or a recommender_arn before you get started with the notebook below.\n", + "\n", + "Following is a [tutorial](https://github.com/aws-samples/retail-demo-store/blob/master/workshop/1-Personalization/Lab-1-Introduction-and-data-preparation.ipynb) to set up a campaign_arn/recommender_arn on Amazon Personalize. Once the campaign_arn/recommender_arn is set up, you can use it in the LangChain ecosystem. \n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 1. Install Dependencies" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "!pip install boto3" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 2. Sample Use-cases" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.1 [Use-case-1] Setup Amazon Personalize Client and retrieve recommendations" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_experimental.recommenders import AmazonPersonalize\n", + "\n", + "recommender_arn = \"\"\n", + "\n", + "client = AmazonPersonalize(\n", + " credentials_profile_name=\"default\",\n", + " region_name=\"us-west-2\",\n", + " recommender_arn=recommender_arn,\n", + ")\n", + "client.get_recommendations(user_id=\"1\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "source": [ + "### 2.2 [Use-case-2] Invoke Personalize Chain for summarizing results" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "from langchain.llms.bedrock import Bedrock\n", + "from langchain_experimental.recommenders import AmazonPersonalizeChain\n", + "\n", + "bedrock_llm = Bedrock(model_id=\"anthropic.claude-v2\", region_name=\"us-west-2\")\n", + "\n", + "# Create personalize chain\n", + "# Use return_direct=True if you do not want summary\n", + "chain = AmazonPersonalizeChain.from_llm(\n", + " llm=bedrock_llm, client=client, return_direct=False\n", + ")\n", + "response = chain({\"user_id\": \"1\"})\n", + "print(response)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.3 [Use-Case-3] Invoke Amazon Personalize Chain using your own prompt" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain.prompts.prompt import PromptTemplate\n", + "\n", + "RANDOM_PROMPT_QUERY = \"\"\"\n", + "You are a skilled publicist. 
Write a high-converting marketing email advertising several movies available in a video-on-demand streaming platform next week, \n", + " given the movie and user information below. Your email will leverage the power of storytelling and persuasive language. \n", + " The movies to recommend and their information is contained in the tag. \n", + " All movies in the tag must be recommended. Give a summary of the movies and why the human should watch them. \n", + " Put the email between tags.\n", + "\n", + " \n", + " {result} \n", + " \n", + "\n", + " Assistant:\n", + " \"\"\"\n", + "\n", + "RANDOM_PROMPT = PromptTemplate(input_variables=[\"result\"], template=RANDOM_PROMPT_QUERY)\n", + "\n", + "chain = AmazonPersonalizeChain.from_llm(\n", + " llm=bedrock_llm, client=client, return_direct=False, prompt_template=RANDOM_PROMPT\n", + ")\n", + "chain.run({\"user_id\": \"1\", \"item_id\": \"234\"})" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2.4 [Use-case-4] Invoke Amazon Personalize in a Sequential Chain " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain.chains import LLMChain, SequentialChain\n", + "\n", + "RANDOM_PROMPT_QUERY_2 = \"\"\"\n", + "You are a skilled publicist. Write a high-converting marketing email advertising several movies available in a video-on-demand streaming platform next week, \n", + " given the movie and user information below. Your email will leverage the power of storytelling and persuasive language. \n", + " You want the email to impress the user, so make it appealing to them.\n", + " The movies to recommend and their information is contained in the tag. \n", + " All movies in the tag must be recommended. Give a summary of the movies and why the human should watch them. 
\n", + " Put the email between tags.\n", + "\n", + " \n", + " {result}\n", + " \n", + "\n", + " Assistant:\n", + " \"\"\"\n", + "\n", + "RANDOM_PROMPT_2 = PromptTemplate(\n", + " input_variables=[\"result\"], template=RANDOM_PROMPT_QUERY_2\n", + ")\n", + "personalize_chain_instance = AmazonPersonalizeChain.from_llm(\n", + " llm=bedrock_llm, client=client, return_direct=True\n", + ")\n", + "random_chain_instance = LLMChain(llm=bedrock_llm, prompt=RANDOM_PROMPT_2)\n", + "overall_chain = SequentialChain(\n", + " chains=[personalize_chain_instance, random_chain_instance],\n", + " input_variables=[\"user_id\"],\n", + " verbose=True,\n", + ")\n", + "overall_chain.run({\"user_id\": \"1\", \"item_id\": \"234\"})" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "source": [ + "### 2.5 [Use-case-5] Invoke Amazon Personalize and retrieve metadata " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "recommender_arn = \"\"\n", + "metadata_column_names = [\n", + " \"\",\n", + " \"\",\n", + "]\n", + "metadataMap = {\"ITEMS\": metadata_column_names}\n", + "\n", + "client = AmazonPersonalize(\n", + " credentials_profile_name=\"default\",\n", + " region_name=\"us-west-2\",\n", + " recommender_arn=recommender_arn,\n", + ")\n", + "client.get_recommendations(user_id=\"1\", metadataColumns=metadataMap)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "source": [ + "### 2.6 [Use-Case 6] Invoke Personalize Chain with returned metadata for summarizing results" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "bedrock_llm = Bedrock(model_id=\"anthropic.claude-v2\", region_name=\"us-west-2\")\n", + "\n", + "# Create personalize chain\n", + "# Use return_direct=True if you do not want summary\n", + "chain = AmazonPersonalizeChain.from_llm(\n", + " llm=bedrock_llm, client=client, return_direct=False\n", + ")\n", + "response = chain({\"user_id\": \"1\", \"metadata_columns\": metadataMap})\n", + "print(response)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.7" + }, + "vscode": { + "interpreter": { + "hash": "15e58ce194949b77a891bd4339ce3d86a9bd138e905926019517993f97db9e6c" + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/cookbook/apache_kafka_message_handling.ipynb b/cookbook/apache_kafka_message_handling.ipynb new file mode 100644 index 0000000000000..616c12ac68039 --- /dev/null +++ b/cookbook/apache_kafka_message_handling.ipynb @@ -0,0 +1,922 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "rT1cmV4qCa2X" + }, + "source": [ + "# Using Apache Kafka to route messages\n", + "\n", + "---\n", + "\n", + "\n", + "\n", + "This notebook shows you how to use LangChain's standard chat features while passing the chat messages back and forth via Apache Kafka.\n", + "\n", + "This goal is to simulate an architecture where the chat 
front end and the LLM are running as separate services that need to communicate with one another over an internal network.\n", + "\n", + "It's an alternative to the typical pattern of requesting a response from the model via a REST API (there's more info on why you would want to do this at the end of the notebook)." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "UPYtfAR_9YxZ" + }, + "source": [ + "### 1. Install the main dependencies\n", + "\n", + "Dependencies include:\n", + "\n", + "- The Quix Streams library for managing interactions with Apache Kafka (or Kafka-like tools such as Redpanda) in a \"Pandas-like\" way.\n", + "- The LangChain library for managing interactions with Llama-2 and storing conversation state." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "ZX5tfKiy9cN-" + }, + "outputs": [], + "source": [ + "!pip install quixstreams==2.1.2a langchain==0.0.340 huggingface_hub==0.19.4 langchain-experimental==0.0.42 python-dotenv" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "losTSdTB9d9O" + }, + "source": [ + "### 2. Build and install the llama-cpp-python library (with CUDA enabled so that we can take advantage of the Google Colab GPU)\n", + "\n", + "The `llama-cpp-python` library is a Python wrapper around the `llama-cpp` library which enables you to efficiently leverage just a CPU to run quantized LLMs.\n", + "\n", + "When you use the standard `pip install llama-cpp-python` command, you do not get GPU support by default. Generation can be very slow if you rely on just the CPU in Google Colab, so the following command adds an extra option to build and install\n", + "`llama-cpp-python` with GPU support (make sure you have a GPU-enabled runtime selected in Google Colab)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "-JCQdl1G9tbl" + }, + "outputs": [], + "source": [ + "!CMAKE_ARGS=\"-DLLAMA_CUBLAS=on\" FORCE_CMAKE=1 pip install llama-cpp-python" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "5_vjVIAh9rLl" + }, + "source": [ + "### 3. Download and set up Kafka and Zookeeper instances\n", + "\n", + "Download the Kafka binaries from the Apache website and start the servers as daemons. We'll use the default configurations (provided by Apache Kafka) for spinning up the instances." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "id": "zFz7czGRW5Wr" + }, + "outputs": [], + "source": [ + "!curl -sSOL https://dlcdn.apache.org/kafka/3.6.1/kafka_2.13-3.6.1.tgz\n", + "!tar -xzf kafka_2.13-3.6.1.tgz" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "Uf7NR_UZ9wye" + }, + "outputs": [], + "source": [ + "!./kafka_2.13-3.6.1/bin/zookeeper-server-start.sh -daemon ./kafka_2.13-3.6.1/config/zookeeper.properties\n", + "!./kafka_2.13-3.6.1/bin/kafka-server-start.sh -daemon ./kafka_2.13-3.6.1/config/server.properties\n", + "!echo \"Waiting for 10 secs until kafka and zookeeper services are up and running\"\n", + "!sleep 10" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "H3SafFuS94p1" + }, + "source": [ + "### 4. Check that the Kafka Daemons are running\n", + "\n", + "Show the running processes and filter them for Java processes (you should see two: one for each server)."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "CZDC2lQP99yp" + }, + "outputs": [], + "source": [ + "!ps aux | grep -E '[j]ava'" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Snoxmjb5-V37" + }, + "source": [ + "### 5. Import the required dependencies and initialize required variables\n", + "\n", + "Import the Quix Streams library for interacting with Kafka, and the necessary LangChain components for running a `ConversationChain`." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "id": "plR9e_MF-XL5" + }, + "outputs": [], + "source": [ + "# Import utility libraries\n", + "import json\n", + "import random\n", + "import re\n", + "import time\n", + "import uuid\n", + "from os import environ\n", + "from pathlib import Path\n", + "from random import choice, randint, random\n", + "\n", + "from dotenv import load_dotenv\n", + "\n", + "# Import a Hugging Face utility to download models directly from Hugging Face hub:\n", + "from huggingface_hub import hf_hub_download\n", + "from langchain.chains import ConversationChain\n", + "\n", + "# Import Langchain modules for managing prompts and conversation chains:\n", + "from langchain.llms import LlamaCpp\n", + "from langchain.memory import ConversationTokenBufferMemory\n", + "from langchain.prompts import PromptTemplate, load_prompt\n", + "from langchain.schema import SystemMessage\n", + "from langchain_experimental.chat_models import Llama2Chat\n", + "from quixstreams import Application, State, message_key\n", + "\n", + "# Import Quix dependencies\n", + "from quixstreams.kafka import Producer\n", + "\n", + "# Initialize global variables.\n", + "AGENT_ROLE = \"AI\"\n", + "chat_id = \"\"\n", + "\n", + "# Set the current role to the role constant and initialize variables for supplementary customer metadata:\n", + "role = AGENT_ROLE" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "HgJjJ9aZ-liy" + }, + "source": [ + "### 6. Download the \"llama-2-7b-chat.Q4_K_M.gguf\" model\n", + "\n", + "Download the quantized LLama-2 7B model from Hugging Face which we will use as a local LLM (rather than relying on REST API calls to an external service)." + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 67, + "referenced_widgets": [ + "969343cdbe604a26926679bbf8bd2dda", + "d8b8370c9b514715be7618bfe6832844", + "0def954cca89466b8408fadaf3b82e64", + "462482accc664729980562e208ceb179", + "80d842f73c564dc7b7cc316c763e2633", + "fa055d9f2a9d4a789e9cf3c89e0214e5", + "30ecca964a394109ac2ad757e3aec6c0", + "fb6478ce2dac489bb633b23ba0953c5c", + "734b0f5da9fc4307a95bab48cdbb5d89", + "b32f3a86a74741348511f4e136744ac8", + "e409071bff5a4e2d9bf0e9f5cc42231b" + ] + }, + "id": "Qwu4YoSA-503", + "outputId": "f956976c-7485-415b-ac93-4336ade31964" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The model path does not exist in state. 
Downloading model...\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "969343cdbe604a26926679bbf8bd2dda", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "llama-2-7b-chat.Q4_K_M.gguf: 0%| | 0.00/4.08G [00:00 2:chain:RunnableParallel] Entering Chain run with input:\n", + "\u001b[0m{\n", + " \"input\": \"What is the first transduction model relying entirely on self-attention?\"\n", + "}\n", + "\u001b[32;1m\u001b[1;3m[chain/start]\u001b[0m \u001b[1m[1:chain:RunnableSequence > 2:chain:RunnableParallel > 4:chain:RunnablePassthrough] Entering Chain run with input:\n", + "\u001b[0m{\n", + " \"input\": \"What is the first transduction model relying entirely on self-attention?\"\n", + "}\n", + "\u001b[36;1m\u001b[1;3m[chain/end]\u001b[0m \u001b[1m[1:chain:RunnableSequence > 2:chain:RunnableParallel > 4:chain:RunnablePassthrough] [1ms] Exiting Chain run with output:\n", + "\u001b[0m{\n", + " \"output\": \"What is the first transduction model relying entirely on self-attention?\"\n", + "}\n", + "\u001b[36;1m\u001b[1;3m[chain/end]\u001b[0m \u001b[1m[1:chain:RunnableSequence > 2:chain:RunnableParallel] [66ms] Exiting Chain run with output:\n", + "\u001b[0m[outputs]\n", + "\u001b[32;1m\u001b[1;3m[chain/start]\u001b[0m \u001b[1m[1:chain:RunnableSequence > 5:prompt:ChatPromptTemplate] Entering Prompt run with input:\n", + "\u001b[0m[inputs]\n", + "\u001b[36;1m\u001b[1;3m[chain/end]\u001b[0m \u001b[1m[1:chain:RunnableSequence > 5:prompt:ChatPromptTemplate] [1ms] Exiting Prompt run with output:\n", + "\u001b[0m{\n", + " \"lc\": 1,\n", + " \"type\": \"constructor\",\n", + " \"id\": [\n", + " \"langchain\",\n", + " \"prompts\",\n", + " \"chat\",\n", + " \"ChatPromptValue\"\n", + " ],\n", + " \"kwargs\": {\n", + " \"messages\": [\n", + " {\n", + " \"lc\": 1,\n", + " \"type\": \"constructor\",\n", + " \"id\": [\n", + " \"langchain\",\n", + " \"schema\",\n", + " \"messages\",\n", + " \"HumanMessage\"\n", + " ],\n", + " \"kwargs\": {\n", + " \"content\": \"You are an assistant for question-answering tasks. Use the following pieces of retrieved context to answer the question. If you don't know the answer, just say that you don't know. Use three sentences maximum and keep the answer concise.\\nQuestion: What is the first transduction model relying entirely on self-attention? \\nContext: [Document(page_content='To the best of our knowledge, however, the Transformer is the first transduction model relying entirely on self-attention to compute representations of its input and output without using sequence-aligned RNNs or convolution.\\\\nIn the following sections, we will describe the Transformer, motivate self-attention and discuss its advantages over models such as (neural_gpu, ; NalBytenet2017, ) and (JonasFaceNet2017, ).\\\\n\\\\n\\\\n\\\\n\\\\n3 Model Architecture\\\\n\\\\nFigure 1: The Transformer - model architecture.', metadata={'source': 'https://ar5iv.labs.arxiv.org/html/1706.03762', 'title': '[1706.03762] Attention Is All You Need', 'language': 'en'}), Document(page_content='In this work, we presented the Transformer, the first sequence transduction model based entirely on attention, replacing the recurrent layers most commonly used in encoder-decoder architectures with multi-headed self-attention.\\\\n\\\\n\\\\nFor translation tasks, the Transformer can be trained significantly faster than architectures based on recurrent or convolutional layers. 
On both WMT 2014 English-to-German and WMT 2014 English-to-French translation tasks, we achieve a new state of the art. In the former task our best model outperforms even all previously reported ensembles. \\\\n\\\\n\\\\nWe are excited about the future of attention-based models and plan to apply them to other tasks. We plan to extend the Transformer to problems involving input and output modalities other than text and to investigate local, restricted attention mechanisms to efficiently handle large inputs and outputs such as images, audio and video.\\\\nMaking generation less sequential is another research goals of ours.', metadata={'source': 'https://ar5iv.labs.arxiv.org/html/1706.03762', 'title': '[1706.03762] Attention Is All You Need', 'language': 'en'}), Document(page_content='Attention mechanisms have become an integral part of compelling sequence modeling and transduction models in various tasks, allowing modeling of dependencies without regard to their distance in the input or output sequences (bahdanau2014neural, ; structuredAttentionNetworks, ). In all but a few cases (decomposableAttnModel, ), however, such attention mechanisms are used in conjunction with a recurrent network.\\\\n\\\\n\\\\nIn this work we propose the Transformer, a model architecture eschewing recurrence and instead relying entirely on an attention mechanism to draw global dependencies between input and output. The Transformer allows for significantly more parallelization and can reach a new state of the art in translation quality after being trained for as little as twelve hours on eight P100 GPUs.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n2 Background', metadata={'source': 'https://ar5iv.labs.arxiv.org/html/1706.03762', 'title': '[1706.03762] Attention Is All You Need', 'language': 'en'}), Document(page_content='The dominant sequence transduction models are based on complex recurrent or convolutional neural networks that include an encoder and a decoder. The best performing models also connect the encoder and decoder through an attention mechanism. We propose a new simple network architecture, the Transformer, based solely on attention mechanisms, dispensing with recurrence and convolutions entirely. Experiments on two machine translation tasks show these models to be superior in quality while being more parallelizable and requiring significantly less time to train. Our model achieves 28.4 BLEU on the WMT 2014 English-to-German translation task, improving over the existing best results, including ensembles, by over 2 BLEU. On the WMT 2014 English-to-French translation task, our model establishes a new single-model state-of-the-art BLEU score of 41.8 after training for 3.5 days on eight GPUs, a small fraction of the training costs of the best models from the literature. We show that the', metadata={'source': 'https://ar5iv.labs.arxiv.org/html/1706.03762', 'title': '[1706.03762] Attention Is All You Need', 'language': 'en'})] \\nAnswer:\",\n", + " \"additional_kwargs\": {}\n", + " }\n", + " }\n", + " ]\n", + " }\n", + "}\n", + "\u001b[32;1m\u001b[1;3m[llm/start]\u001b[0m \u001b[1m[1:chain:RunnableSequence > 6:llm:HuggingFacePipeline] Entering LLM run with input:\n", + "\u001b[0m{\n", + " \"prompts\": [\n", + " \"Human: You are an assistant for question-answering tasks. Use the following pieces of retrieved context to answer the question. If you don't know the answer, just say that you don't know. 
Use three sentences maximum and keep the answer concise.\\nQuestion: What is the first transduction model relying entirely on self-attention? \\nContext: [Document(page_content='To the best of our knowledge, however, the Transformer is the first transduction model relying entirely on self-attention to compute representations of its input and output without using sequence-aligned RNNs or convolution.\\\\nIn the following sections, we will describe the Transformer, motivate self-attention and discuss its advantages over models such as (neural_gpu, ; NalBytenet2017, ) and (JonasFaceNet2017, ).\\\\n\\\\n\\\\n\\\\n\\\\n3 Model Architecture\\\\n\\\\nFigure 1: The Transformer - model architecture.', metadata={'source': 'https://ar5iv.labs.arxiv.org/html/1706.03762', 'title': '[1706.03762] Attention Is All You Need', 'language': 'en'}), Document(page_content='In this work, we presented the Transformer, the first sequence transduction model based entirely on attention, replacing the recurrent layers most commonly used in encoder-decoder architectures with multi-headed self-attention.\\\\n\\\\n\\\\nFor translation tasks, the Transformer can be trained significantly faster than architectures based on recurrent or convolutional layers. On both WMT 2014 English-to-German and WMT 2014 English-to-French translation tasks, we achieve a new state of the art. In the former task our best model outperforms even all previously reported ensembles. \\\\n\\\\n\\\\nWe are excited about the future of attention-based models and plan to apply them to other tasks. We plan to extend the Transformer to problems involving input and output modalities other than text and to investigate local, restricted attention mechanisms to efficiently handle large inputs and outputs such as images, audio and video.\\\\nMaking generation less sequential is another research goals of ours.', metadata={'source': 'https://ar5iv.labs.arxiv.org/html/1706.03762', 'title': '[1706.03762] Attention Is All You Need', 'language': 'en'}), Document(page_content='Attention mechanisms have become an integral part of compelling sequence modeling and transduction models in various tasks, allowing modeling of dependencies without regard to their distance in the input or output sequences (bahdanau2014neural, ; structuredAttentionNetworks, ). In all but a few cases (decomposableAttnModel, ), however, such attention mechanisms are used in conjunction with a recurrent network.\\\\n\\\\n\\\\nIn this work we propose the Transformer, a model architecture eschewing recurrence and instead relying entirely on an attention mechanism to draw global dependencies between input and output. The Transformer allows for significantly more parallelization and can reach a new state of the art in translation quality after being trained for as little as twelve hours on eight P100 GPUs.\\\\n\\\\n\\\\n\\\\n\\\\n\\\\n2 Background', metadata={'source': 'https://ar5iv.labs.arxiv.org/html/1706.03762', 'title': '[1706.03762] Attention Is All You Need', 'language': 'en'}), Document(page_content='The dominant sequence transduction models are based on complex recurrent or convolutional neural networks that include an encoder and a decoder. The best performing models also connect the encoder and decoder through an attention mechanism. We propose a new simple network architecture, the Transformer, based solely on attention mechanisms, dispensing with recurrence and convolutions entirely. 
Experiments on two machine translation tasks show these models to be superior in quality while being more parallelizable and requiring significantly less time to train. Our model achieves 28.4 BLEU on the WMT 2014 English-to-German translation task, improving over the existing best results, including ensembles, by over 2 BLEU. On the WMT 2014 English-to-French translation task, our model establishes a new single-model state-of-the-art BLEU score of 41.8 after training for 3.5 days on eight GPUs, a small fraction of the training costs of the best models from the literature. We show that the', metadata={'source': 'https://ar5iv.labs.arxiv.org/html/1706.03762', 'title': '[1706.03762] Attention Is All You Need', 'language': 'en'})] \\nAnswer:\"\n", + " ]\n", + "}\n", + "\u001b[36;1m\u001b[1;3m[llm/end]\u001b[0m \u001b[1m[1:chain:RunnableSequence > 6:llm:HuggingFacePipeline] [4.34s] Exiting LLM run with output:\n", + "\u001b[0m{\n", + " \"generations\": [\n", + " [\n", + " {\n", + " \"text\": \" The first transduction model relying entirely on self-attention is the Transformer.\",\n", + " \"generation_info\": null,\n", + " \"type\": \"Generation\"\n", + " }\n", + " ]\n", + " ],\n", + " \"llm_output\": null,\n", + " \"run\": null\n", + "}\n", + "\u001b[36;1m\u001b[1;3m[chain/end]\u001b[0m \u001b[1m[1:chain:RunnableSequence] [4.41s] Exiting Chain run with output:\n", + "\u001b[0m{\n", + " \"output\": \" The first transduction model relying entirely on self-attention is the Transformer.\"\n", + "}\n" + ] + } + ], + "source": [ + "langchain.verbose = True\n", + "langchain.debug = True\n", + "\n", + "llm_res = rag_chain.invoke(\n", + " \"What is the first transduction model relying entirely on self-attention?\",\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "id": "023404a1-401a-46e1-8ab5-cafbc8593b04", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "' The first transduction model relying entirely on self-attention is the Transformer.'" + ] + }, + "execution_count": 32, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "llm_res" + ] + }, + { + "cell_type": "markdown", + "id": "0eaefd01-254a-445d-a95f-37889c126e0e", + "metadata": {}, + "source": [ + "Based on the retrieved documents, the answer is indeed correct :)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.18" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/cookbook/self-discover.ipynb b/cookbook/self-discover.ipynb new file mode 100644 index 0000000000000..2e885ad0ba71b --- /dev/null +++ b/cookbook/self-discover.ipynb @@ -0,0 +1,423 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "a38e5d2d-7587-4192-90f2-b58e6c62f08c", + "metadata": {}, + "source": [ + "# Self Discover\n", + "\n", + "An implementation of the [Self-Discover paper](https://arxiv.org/pdf/2402.03620.pdf).\n", + "\n", + "Based on [this implementation from @catid](https://github.com/catid/self-discover/tree/main?tab=readme-ov-file)" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "a18d8f24-5d9a-45c5-9739-6f3c4ed6c9c9", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_openai import ChatOpenAI" + ] + }, + { + 
"cell_type": "code", + "execution_count": 2, + "id": "9f554045-6e79-42d3-be4b-835bbbd0b78c", + "metadata": {}, + "outputs": [], + "source": [ + "model = ChatOpenAI(temperature=0, model=\"gpt-4-turbo-preview\")" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "9e9925aa-638a-4862-823e-9803402b8f82", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain import hub\n", + "from langchain_core.prompts import PromptTemplate" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "c4cc5c8c-f6a5-42c7-9ed5-780d79b3b29a", + "metadata": {}, + "outputs": [], + "source": [ + "select_prompt = hub.pull(\"hwchase17/self-discovery-select\")" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "a5b53d29-f5b6-4f39-af97-bb6b133e1d18", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Select several reasoning modules that are crucial to utilize in order to solve the given task:\n", + "\n", + "All reasoning module descriptions:\n", + "\u001b[33;1m\u001b[1;3m{reasoning_modules}\u001b[0m\n", + "\n", + "Task: \u001b[33;1m\u001b[1;3m{task_description}\u001b[0m\n", + "\n", + "Select several modules are crucial for solving the task above:\n", + "\n" + ] + } + ], + "source": [ + "select_prompt.pretty_print()" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "26eaa6bc-5202-4b22-9522-33f227c8eb55", + "metadata": {}, + "outputs": [], + "source": [ + "adapt_prompt = hub.pull(\"hwchase17/self-discovery-adapt\")" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "dc30afb9-180d-417b-9935-f7ef166710b8", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Rephrase and specify each reasoning module so that it better helps solving the task:\n", + "\n", + "SELECTED module descriptions:\n", + "\u001b[33;1m\u001b[1;3m{selected_modules}\u001b[0m\n", + "\n", + "Task: \u001b[33;1m\u001b[1;3m{task_description}\u001b[0m\n", + "\n", + "Adapt each reasoning module description to better solve the task:\n", + "\n" + ] + } + ], + "source": [ + "adapt_prompt.pretty_print()" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "a93253a9-8f50-49dd-8815-c3927bae1905", + "metadata": {}, + "outputs": [], + "source": [ + "structured_prompt = hub.pull(\"hwchase17/self-discovery-structure\")" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "8ea8dd78-4285-400b-83d2-c4a241903a79", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Operationalize the reasoning modules into a step-by-step reasoning plan in JSON format:\n", + "\n", + "Here's an example:\n", + "\n", + "Example task:\n", + "\n", + "If you follow these instructions, do you return to the starting point? Always face forward. Take 1 step backward. Take 9 steps left. Take 2 steps backward. Take 6 steps forward. Take 4 steps forward. Take 4 steps backward. 
Take 3 steps right.\n", + "\n", + "Example reasoning structure:\n", + "\n", + "{\n", + " \"Position after instruction 1\":\n", + " \"Position after instruction 2\":\n", + " \"Position after instruction n\":\n", + " \"Is final position the same as starting position\":\n", + "}\n", + "\n", + "Adapted module description:\n", + "\u001b[33;1m\u001b[1;3m{adapted_modules}\u001b[0m\n", + "\n", + "Task: \u001b[33;1m\u001b[1;3m{task_description}\u001b[0m\n", + "\n", + "Implement a reasoning structure for solvers to follow step-by-step and arrive at correct answer.\n", + "\n", + "Note: do NOT actually arrive at a conclusion in this pass. Your job is to generate a PLAN so that in the future you can fill it out and arrive at the correct conclusion for tasks like this\n" + ] + } + ], + "source": [ + "structured_prompt.pretty_print()" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "f3d4d79d-f414-4588-b476-4a35b3ba6fbf", + "metadata": {}, + "outputs": [], + "source": [ + "reasoning_prompt = hub.pull(\"hwchase17/self-discovery-reasoning\")" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "23d1e32e-d12e-454a-8484-c08e250e3262", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Follow the step-by-step reasoning plan in JSON to correctly solve the task. Fill in the values following the keys by reasoning specifically about the task given. Do not simply rephrase the keys.\n", + " \n", + "Reasoning Structure:\n", + "\u001b[33;1m\u001b[1;3m{reasoning_structure}\u001b[0m\n", + "\n", + "Task: \u001b[33;1m\u001b[1;3m{task_description}\u001b[0m\n" + ] + } + ], + "source": [ + "reasoning_prompt.pretty_print()" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "7b9af01d-da28-4785-b069-efea61905cfa", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "PromptTemplate(input_variables=['reasoning_structure', 'task_description'], template='Follow the step-by-step reasoning plan in JSON to correctly solve the task. Fill in the values following the keys by reasoning specifically about the task given. 
Do not simply rephrase the keys.\\n \\nReasoning Structure:\\n{reasoning_structure}\\n\\nTask: {task_description}')" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "reasoning_prompt" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "399bf160-e257-429f-b27e-66d4063f195f", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_core.output_parsers import StrOutputParser\n", + "from langchain_core.runnables import RunnablePassthrough" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "5c3bd203-7dc1-457e-813f-283aaf059ec0", + "metadata": {}, + "outputs": [], + "source": [ + "select_chain = select_prompt | model | StrOutputParser()" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "86420da0-7cc2-4659-853e-9c3ef808e47c", + "metadata": {}, + "outputs": [], + "source": [ + "adapt_chain = adapt_prompt | model | StrOutputParser()" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "id": "270a3905-58a3-4650-96ca-e8254040285f", + "metadata": {}, + "outputs": [], + "source": [ + "structure_chain = structured_prompt | model | StrOutputParser()" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "id": "55b486cc-36be-497e-9eba-9c8dc228f2d1", + "metadata": {}, + "outputs": [], + "source": [ + "reasoning_chain = reasoning_prompt | model | StrOutputParser()" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "id": "92d8d484-055b-48a8-98bc-e7d40c12db2e", + "metadata": {}, + "outputs": [], + "source": [ + "overall_chain = (\n", + " RunnablePassthrough.assign(selected_modules=select_chain)\n", + " .assign(adapted_modules=adapt_chain)\n", + " .assign(reasoning_structure=structure_chain)\n", + " .assign(answer=reasoning_chain)\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "id": "29fe385b-cf5d-4581-80e7-55462f5628bb", + "metadata": {}, + "outputs": [], + "source": [ + "reasoning_modules = [\n", + " \"1. How could I devise an experiment to help solve that problem?\",\n", + " \"2. Make a list of ideas for solving this problem, and apply them one by one to the problem to see if any progress can be made.\",\n", + " # \"3. How could I measure progress on this problem?\",\n", + " \"4. How can I simplify the problem so that it is easier to solve?\",\n", + " \"5. What are the key assumptions underlying this problem?\",\n", + " \"6. What are the potential risks and drawbacks of each solution?\",\n", + " \"7. What are the alternative perspectives or viewpoints on this problem?\",\n", + " \"8. What are the long-term implications of this problem and its solutions?\",\n", + " \"9. How can I break down this problem into smaller, more manageable parts?\",\n", + " \"10. Critical Thinking: This style involves analyzing the problem from different perspectives, questioning assumptions, and evaluating the evidence or information available. It focuses on logical reasoning, evidence-based decision-making, and identifying potential biases or flaws in thinking.\",\n", + " \"11. Try creative thinking, generate innovative and out-of-the-box ideas to solve the problem. Explore unconventional solutions, thinking beyond traditional boundaries, and encouraging imagination and originality.\",\n", + " # \"12. Seek input and collaboration from others to solve the problem. Emphasize teamwork, open communication, and leveraging the diverse perspectives and expertise of a group to come up with effective solutions.\",\n", + " \"13. 
Use systems thinking: Consider the problem as part of a larger system and understanding the interconnectedness of various elements. Focuses on identifying the underlying causes, feedback loops, and interdependencies that influence the problem, and developing holistic solutions that address the system as a whole.\",\n", + " \"14. Use Risk Analysis: Evaluate potential risks, uncertainties, and tradeoffs associated with different solutions or approaches to a problem. Emphasize assessing the potential consequences and likelihood of success or failure, and making informed decisions based on a balanced analysis of risks and benefits.\",\n", + " # \"15. Use Reflective Thinking: Step back from the problem, take the time for introspection and self-reflection. Examine personal biases, assumptions, and mental models that may influence problem-solving, and being open to learning from past experiences to improve future approaches.\",\n", + " \"16. What is the core issue or problem that needs to be addressed?\",\n", + " \"17. What are the underlying causes or factors contributing to the problem?\",\n", + " \"18. Are there any potential solutions or strategies that have been tried before? If yes, what were the outcomes and lessons learned?\",\n", + " \"19. What are the potential obstacles or challenges that might arise in solving this problem?\",\n", + " \"20. Are there any relevant data or information that can provide insights into the problem? If yes, what data sources are available, and how can they be analyzed?\",\n", + " \"21. Are there any stakeholders or individuals who are directly affected by the problem? What are their perspectives and needs?\",\n", + " \"22. What resources (financial, human, technological, etc.) are needed to tackle the problem effectively?\",\n", + " \"23. How can progress or success in solving the problem be measured or evaluated?\",\n", + " \"24. What indicators or metrics can be used?\",\n", + " \"25. Is the problem a technical or practical one that requires a specific expertise or skill set? Or is it more of a conceptual or theoretical problem?\",\n", + " \"26. Does the problem involve a physical constraint, such as limited resources, infrastructure, or space?\",\n", + " \"27. Is the problem related to human behavior, such as a social, cultural, or psychological issue?\",\n", + " \"28. Does the problem involve decision-making or planning, where choices need to be made under uncertainty or with competing objectives?\",\n", + " \"29. Is the problem an analytical one that requires data analysis, modeling, or optimization techniques?\",\n", + " \"30. Is the problem a design challenge that requires creative solutions and innovation?\",\n", + " \"31. Does the problem require addressing systemic or structural issues rather than just individual instances?\",\n", + " \"32. Is the problem time-sensitive or urgent, requiring immediate attention and action?\",\n", + " \"33. What kinds of solution typically are produced for this kind of problem specification?\",\n", + " \"34. Given the problem specification and the current best solution, have a guess about other possible solutions.\"\n", + " \"35. Letโ€™s imagine the current best solution is totally wrong, what other ways are there to think about the problem specification?\"\n", + " \"36. What is the best way to modify this current best solution, given what you know about these kinds of problem specification?\"\n", + " \"37. Ignoring the current best solution, create an entirely new solution to the problem.\"\n", + " # \"38. 
Letโ€™s think step by step.\"\n", + " \"39. Letโ€™s make a step by step plan and implement it with good notation and explanation.\",\n", + "]\n", + "\n", + "\n", + "task_example = \"Lisa has 10 apples. She gives 3 apples to her friend and then buys 5 more apples from the store. How many apples does Lisa have now?\"\n", + "\n", + "task_example = \"\"\"This SVG path element draws a:\n", + "(A) circle (B) heptagon (C) hexagon (D) kite (E) line (F) octagon (G) pentagon(H) rectangle (I) sector (J) triangle\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "id": "6cbfbe81-f751-42da-843a-f9003ace663d", + "metadata": {}, + "outputs": [], + "source": [ + "reasoning_modules_str = \"\\n\".join(reasoning_modules)" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "id": "d411c7aa-7017-4d67-88b5-43b5d161c34c", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'task_description': 'This SVG path element draws a:\\n(A) circle (B) heptagon (C) hexagon (D) kite (E) line (F) octagon (G) pentagon(H) rectangle (I) sector (J) triangle',\n", + " 'reasoning_modules': '1. How could I devise an experiment to help solve that problem?\\n2. Make a list of ideas for solving this problem, and apply them one by one to the problem to see if any progress can be made.\\n4. How can I simplify the problem so that it is easier to solve?\\n5. What are the key assumptions underlying this problem?\\n6. What are the potential risks and drawbacks of each solution?\\n7. What are the alternative perspectives or viewpoints on this problem?\\n8. What are the long-term implications of this problem and its solutions?\\n9. How can I break down this problem into smaller, more manageable parts?\\n10. Critical Thinking: This style involves analyzing the problem from different perspectives, questioning assumptions, and evaluating the evidence or information available. It focuses on logical reasoning, evidence-based decision-making, and identifying potential biases or flaws in thinking.\\n11. Try creative thinking, generate innovative and out-of-the-box ideas to solve the problem. Explore unconventional solutions, thinking beyond traditional boundaries, and encouraging imagination and originality.\\n13. Use systems thinking: Consider the problem as part of a larger system and understanding the interconnectedness of various elements. Focuses on identifying the underlying causes, feedback loops, and interdependencies that influence the problem, and developing holistic solutions that address the system as a whole.\\n14. Use Risk Analysis: Evaluate potential risks, uncertainties, and tradeoffs associated with different solutions or approaches to a problem. Emphasize assessing the potential consequences and likelihood of success or failure, and making informed decisions based on a balanced analysis of risks and benefits.\\n16. What is the core issue or problem that needs to be addressed?\\n17. What are the underlying causes or factors contributing to the problem?\\n18. Are there any potential solutions or strategies that have been tried before? If yes, what were the outcomes and lessons learned?\\n19. What are the potential obstacles or challenges that might arise in solving this problem?\\n20. Are there any relevant data or information that can provide insights into the problem? If yes, what data sources are available, and how can they be analyzed?\\n21. Are there any stakeholders or individuals who are directly affected by the problem? What are their perspectives and needs?\\n22. 
What resources (financial, human, technological, etc.) are needed to tackle the problem effectively?\\n23. How can progress or success in solving the problem be measured or evaluated?\\n24. What indicators or metrics can be used?\\n25. Is the problem a technical or practical one that requires a specific expertise or skill set? Or is it more of a conceptual or theoretical problem?\\n26. Does the problem involve a physical constraint, such as limited resources, infrastructure, or space?\\n27. Is the problem related to human behavior, such as a social, cultural, or psychological issue?\\n28. Does the problem involve decision-making or planning, where choices need to be made under uncertainty or with competing objectives?\\n29. Is the problem an analytical one that requires data analysis, modeling, or optimization techniques?\\n30. Is the problem a design challenge that requires creative solutions and innovation?\\n31. Does the problem require addressing systemic or structural issues rather than just individual instances?\\n32. Is the problem time-sensitive or urgent, requiring immediate attention and action?\\n33. What kinds of solution typically are produced for this kind of problem specification?\\n34. Given the problem specification and the current best solution, have a guess about other possible solutions.35. Letโ€™s imagine the current best solution is totally wrong, what other ways are there to think about the problem specification?36. What is the best way to modify this current best solution, given what you know about these kinds of problem specification?37. Ignoring the current best solution, create an entirely new solution to the problem.39. Letโ€™s make a step by step plan and implement it with good notation and explanation.',\n", + " 'selected_modules': 'To solve the task of identifying the shape drawn by the given SVG path element, the following reasoning modules are crucial:\\n\\n1. **Critical Thinking (10)**: This involves analyzing the SVG path commands and coordinates logically to understand the shape they form. It requires questioning assumptions (e.g., not assuming the shape based on a quick glance at the coordinates but rather analyzing the path commands and their implications) and evaluating the information provided by the SVG path data.\\n\\n2. **Analytical Problem Solving (29)**: The task requires data analysis skills to interpret the SVG path commands and coordinates. Understanding how the \"M\" (moveto) and \"L\" (lineto) commands work to draw lines between specified points is essential for determining the shape.\\n\\n3. **Creative Thinking (11)**: While the task primarily involves analytical skills, creative thinking can help in visualizing the shape that the path commands are likely to form, especially when the path data doesn\\'t immediately suggest a common shape.\\n\\n4. **Systems Thinking (13)**: Recognizing the SVG path as part of a larger system (in this case, the SVG graphics system) and understanding how individual path commands contribute to the overall shape can be helpful. This involves understanding the interconnectedness of the start and end points of each line segment and how they come together to form a complete shape.\\n\\n5. **Break Down the Problem (9)**: Breaking down the SVG path into its individual commands and analyzing each segment between \"M\" and \"L\" commands can simplify the task. This makes it easier to visualize and understand the shape being drawn step by step.\\n\\n6. 
**Visualization (not explicitly listed but implied in creative and analytical thinking)**: Visualizing the path that the \"M\" and \"L\" commands create is essential. This isn\\'t a listed module but is a skill that underpins both creative and analytical approaches to solving this problem.\\n\\nGiven the SVG path commands, one would analyze each segment drawn by \"M\" (moveto) and \"L\" (lineto) commands to determine the shape\\'s vertices and sides. This process involves critical thinking to assess the information, analytical skills to interpret the path data, and a degree of creative thinking for visualization. The task does not directly involve assessing risks, long-term implications, or stakeholder perspectives, so modules focused on those aspects (e.g., Risk Analysis (14), Long-term Implications (8)) are less relevant here.',\n", + " 'adapted_modules': 'To enhance the process of identifying the shape drawn by the given SVG path element, the reasoning modules can be adapted and specified as follows:\\n\\n1. **Detailed Path Analysis (Critical Thinking)**: This module focuses on a meticulous examination of the SVG path commands and coordinates. It involves a deep dive into the syntax and semantics of path commands such as \"M\" (moveto) and \"L\" (lineto), challenging initial perceptions and rigorously interpreting the sequence of commands to deduce the shape accurately. This analysis goes beyond surface-level inspection, requiring a systematic questioning of each command\\'s role in constructing the overall shape.\\n\\n2. **Path Command Interpretation (Analytical Problem Solving)**: Essential for this task is the ability to decode the SVG path\\'s \"M\" and \"L\" commands, translating these instructions into a mental or visual representation of the shape\\'s geometry. This module emphasizes the analytical dissection of the path data, focusing on how each command contributes to the formation of vertices and edges, thereby facilitating the identification of the shape.\\n\\n3. **Shape Visualization (Creative Thinking)**: Leveraging imagination to mentally construct the shape from the path commands is the core of this module. It involves creatively synthesizing the segments drawn by the \"M\" and \"L\" commands into a coherent visual image, even when the path data does not immediately suggest a recognizable shape. This creative process aids in bridging gaps in the analytical interpretation, offering alternative perspectives on the possible shape outcomes.\\n\\n4. **Path-to-Shape Synthesis (Systems Thinking)**: This module entails understanding the SVG path as a component within the broader context of vector graphics, focusing on how individual path commands interlink to form a cohesive shape. It requires an appreciation of the cumulative effect of each command in relation to the others, recognizing the systemic relationship between the starting and ending points of segments and their collective role in shaping the final figure.\\n\\n5. **Sequential Command Analysis (Break Down the Problem)**: By segmenting the SVG path into discrete commands, this approach simplifies the complexity of the task. It advocates for a step-by-step examination of the path, where each \"M\" to \"L\" sequence is analyzed in isolation before synthesizing the findings to understand the overall shape. This methodical breakdown facilitates a clearer visualization and comprehension of the shape being drawn.\\n\\n6. 
**Command-to-Geometry Mapping (Visualization)**: Central to solving this task is the ability to map the abstract \"M\" and \"L\" commands onto a concrete geometric representation. This implicit module underlies both the analytical and creative thinking processes, focusing on converting the path data into a visual form that can be easily understood and manipulated mentally. It is about constructing a mental image of the shape as each command is processed, enabling a dynamic visualization that evolves with each new piece of path data.\\n\\nBy adapting and specifying these reasoning modules, the task of identifying the shape drawn by the SVG path element becomes a structured process that leverages critical analysis, analytical problem-solving, creative visualization, systemic thinking, and methodical breakdown to accurately determine the shape as a (D) kite.',\n", + " 'reasoning_structure': '```json\\n{\\n \"Step 1: Detailed Path Analysis\": {\\n \"Description\": \"Examine each SVG path command and its coordinates closely. Understand the syntax and semantics of \\'M\\' (moveto) and \\'L\\' (lineto) commands.\",\\n \"Action\": \"List all path commands and their coordinates.\",\\n \"Expected Outcome\": \"A clear understanding of the sequence and direction of each path command.\"\\n },\\n \"Step 2: Path Command Interpretation\": {\\n \"Description\": \"Decode the \\'M\\' and \\'L\\' commands to translate these instructions into a mental or visual representation of the shape\\'s geometry.\",\\n \"Action\": \"Map each \\'M\\' and \\'L\\' command to its corresponding action (move or draw line) in the context of the shape.\",\\n \"Expected Outcome\": \"A segmented representation of the shape, highlighting vertices and edges.\"\\n },\\n \"Step 3: Shape Visualization\": {\\n \"Description\": \"Use imagination to mentally construct the shape from the path commands, synthesizing the segments into a coherent visual image.\",\\n \"Action\": \"Visualize the shape based on the segmented representation from Step 2.\",\\n \"Expected Outcome\": \"A mental image of the potential shape, considering the sequence and direction of path commands.\"\\n },\\n \"Step 4: Path-to-Shape Synthesis\": {\\n \"Description\": \"Understand the SVG path as a component within the broader context of vector graphics, focusing on how individual path commands interlink to form a cohesive shape.\",\\n \"Action\": \"Analyze the systemic relationship between the starting and ending points of segments and their collective role in shaping the final figure.\",\\n \"Expected Outcome\": \"Identification of the overall shape by recognizing the cumulative effect of each command.\"\\n },\\n \"Step 5: Sequential Command Analysis\": {\\n \"Description\": \"Segment the SVG path into discrete commands for a step-by-step examination, analyzing each \\'M\\' to \\'L\\' sequence in isolation.\",\\n \"Action\": \"Break down the path into individual commands and analyze each separately before synthesizing the findings.\",\\n \"Expected Outcome\": \"A clearer visualization and comprehension of the shape being drawn, segment by segment.\"\\n },\\n \"Step 6: Command-to-Geometry Mapping\": {\\n \"Description\": \"Map the abstract \\'M\\' and \\'L\\' commands onto a concrete geometric representation, constructing a mental image of the shape as each command is processed.\",\\n \"Action\": \"Convert the path data into a visual form that can be easily understood and manipulated mentally.\",\\n \"Expected Outcome\": \"A dynamic visualization of the shape that 
evolves with each new piece of path data, leading to the identification of the shape as a kite.\"\\n },\\n \"Conclusion\": {\\n \"Description\": \"Based on the analysis and visualization steps, determine the shape drawn by the SVG path element.\",\\n \"Action\": \"Review the outcomes of each step and synthesize the information to identify the shape.\",\\n \"Expected Outcome\": \"The correct identification of the shape, supported by the structured analysis and reasoning process.\"\\n }\\n}\\n```',\n", + " 'answer': 'Based on the provided reasoning structure and the SVG path element given, let\\'s analyze the path commands to identify the shape.\\n\\n**Step 1: Detailed Path Analysis**\\n- Description: The SVG path provided contains multiple \\'M\\' (moveto) and \\'L\\' (lineto) commands. Each command specifies a point in a 2D coordinate system.\\n- Action: The path commands are as follows:\\n 1. M 55.57,80.69 (Move to point)\\n 2. L 57.38,65.80 (Line to point)\\n 3. M 57.38,65.80 (Move to point)\\n 4. L 48.90,57.46 (Line to point)\\n 5. M 48.90,57.46 (Move to point)\\n 6. L 45.58,47.78 (Line to point)\\n 7. M 45.58,47.78 (Move to point)\\n 8. L 53.25,36.07 (Line to point)\\n 9. L 66.29,48.90 (Line to point)\\n 10. L 78.69,61.09 (Line to point)\\n 11. L 55.57,80.69 (Line to point)\\n- Expected Outcome: Understanding that the path commands describe a series of movements and lines that form a closed shape.\\n\\n**Step 2: Path Command Interpretation**\\n- Description: The \\'M\\' and \\'L\\' commands are used to move the \"pen\" to a starting point and draw lines to subsequent points, respectively.\\n- Action: The commands describe a shape starting at (55.57,80.69), drawing lines through several points, and finally closing the shape by returning to the starting point.\\n- Expected Outcome: A segmented representation showing a shape with distinct vertices at the specified coordinates.\\n\\n**Step 3: Shape Visualization**\\n- Description: Mentally constructing the shape from the provided path commands.\\n- Action: Visualizing the lines connecting in sequence from the starting point, through each point described by the \\'L\\' commands, and back to the starting point.\\n- Expected Outcome: A mental image of a shape that appears to have four distinct sides, suggesting it could be a quadrilateral.\\n\\n**Step 4: Path-to-Shape Synthesis**\\n- Description: Understanding how the path commands collectively form a specific shape.\\n- Action: Recognizing that the shape starts and ends at the same point, with lines drawn between intermediate points without overlapping, except at the starting/ending point.\\n- Expected Outcome: Identification of a closed, four-sided figure, which suggests it could be a kite based on the symmetry and structure of the lines.\\n\\n**Step 5: Sequential Command Analysis**\\n- Description: Analyzing each \\'M\\' to \\'L\\' sequence in isolation.\\n- Action: Observing that the path does not describe a regular polygon (like a hexagon or octagon) or a circle, but rather a shape with distinct angles and sides.\\n- Expected Outcome: A clearer understanding that the shape has four sides, with two pairs of adjacent sides being potentially unequal, which is characteristic of a kite.\\n\\n**Step 6: Command-to-Geometry Mapping**\\n- Description: Converting the abstract path commands into a geometric shape.\\n- Action: Mapping the path data to visualize a shape with two pairs of adjacent sides that are distinct yet symmetrical, indicative of a kite.\\n- Expected Outcome: A dynamic 
visualization that evolves to clearly represent a kite shape.\\n\\n**Conclusion**\\n- Description: Determining the shape drawn by the SVG path element.\\n- Action: Reviewing the outcomes of each analysis step, which consistently point towards a four-sided figure with distinct properties of a kite.\\n- Expected Outcome: The correct identification of the shape as a kite (D).'}" + ] + }, + "execution_count": 65, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "overall_chain.invoke(\n", + " {\"task_description\": task_example, \"reasoning_modules\": reasoning_modules_str}\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ea8568d5-bdb6-45cd-8d04-1ab305786caa", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c14a291c-7c1b-43bc-807e-11180290985e", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.1" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docker/Makefile b/docker/Makefile new file mode 100644 index 0000000000000..d578580c32396 --- /dev/null +++ b/docker/Makefile @@ -0,0 +1,12 @@ +# Makefile + +build_graphdb: + docker build --tag graphdb ./graphdb + +start_graphdb: + docker-compose up -d graphdb + +down: + docker-compose down -v --remove-orphans + +.PHONY: build_graphdb start_graphdb down diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index ce680ccafda51..968e32469a34a 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -15,3 +15,7 @@ services: - "6020:6379" volumes: - ./redis-volume:/data + graphdb: + image: graphdb + ports: + - "6021:7200" diff --git a/docker/graphdb/Dockerfile b/docker/graphdb/Dockerfile new file mode 100644 index 0000000000000..dfcbe7e622d7a --- /dev/null +++ b/docker/graphdb/Dockerfile @@ -0,0 +1,5 @@ +FROM ontotext/graphdb:10.5.1 +RUN mkdir -p /opt/graphdb/dist/data/repositories/langchain +COPY config.ttl /opt/graphdb/dist/data/repositories/langchain/ +COPY graphdb_create.sh /run.sh +ENTRYPOINT bash /run.sh diff --git a/docker/graphdb/config.ttl b/docker/graphdb/config.ttl new file mode 100644 index 0000000000000..dcbdeeebe1283 --- /dev/null +++ b/docker/graphdb/config.ttl @@ -0,0 +1,46 @@ +@prefix rdfs: . +@prefix rep: . +@prefix sr: . +@prefix sail: . +@prefix graphdb: . 
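As a quick sanity check of the GraphDB service defined above, one could query the repository's SPARQL endpoint once the container is up. This is only a sketch, not part of the PR: the host port 6021 comes from the docker-compose mapping, the repository ID `langchain` from the config, and the triple-count query is illustrative.

```python
import requests

# Assumed endpoint: host port 6021 (mapped to GraphDB's 7200) and repository ID "langchain".
GRAPHDB_REPO_URL = "http://localhost:6021/repositories/langchain"

resp = requests.get(
    GRAPHDB_REPO_URL,
    # Count all triples in the repository as a simple smoke test.
    params={"query": "SELECT (COUNT(*) AS ?triples) WHERE { ?s ?p ?o }"},
    headers={"Accept": "application/sparql-results+json"},
    timeout=30,
)
resp.raise_for_status()
print(resp.json()["results"]["bindings"][0]["triples"]["value"])
```

GraphDB exposes the standard RDF4J-style REST endpoint, so the same request should work with any SPARQL client.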
+ +[] a rep:Repository ; + rep:repositoryID "langchain" ; + rdfs:label "" ; + rep:repositoryImpl [ + rep:repositoryType "graphdb:SailRepository" ; + sr:sailImpl [ + sail:sailType "graphdb:Sail" ; + + graphdb:read-only "false" ; + + # Inference and Validation + graphdb:ruleset "empty" ; + graphdb:disable-sameAs "true" ; + graphdb:check-for-inconsistencies "false" ; + + # Indexing + graphdb:entity-id-size "32" ; + graphdb:enable-context-index "false" ; + graphdb:enablePredicateList "true" ; + graphdb:enable-fts-index "false" ; + graphdb:fts-indexes ("default" "iri") ; + graphdb:fts-string-literals-index "default" ; + graphdb:fts-iris-index "none" ; + + # Queries and Updates + graphdb:query-timeout "0" ; + graphdb:throw-QueryEvaluationException-on-timeout "false" ; + graphdb:query-limit-results "0" ; + + # Settable in the file but otherwise hidden in the UI and in the RDF4J console + graphdb:base-URL "http://example.org/owlim#" ; + graphdb:defaultNS "" ; + graphdb:imports "" ; + graphdb:repository-type "file-repository" ; + graphdb:storage-folder "storage" ; + graphdb:entity-index-size "10000000" ; + graphdb:in-memory-literal-properties "true" ; + graphdb:enable-literal-index "true" ; + ] + ]. diff --git a/docker/graphdb/graphdb_create.sh b/docker/graphdb/graphdb_create.sh new file mode 100644 index 0000000000000..52ffe8ad74a06 --- /dev/null +++ b/docker/graphdb/graphdb_create.sh @@ -0,0 +1,28 @@ +#! /bin/bash +REPOSITORY_ID="langchain" +GRAPHDB_URI="http://localhost:7200/" + +echo -e "\nUsing GraphDB: ${GRAPHDB_URI}" + +function startGraphDB { + echo -e "\nStarting GraphDB..." + exec /opt/graphdb/dist/bin/graphdb +} + +function waitGraphDBStart { + echo -e "\nWaiting GraphDB to start..." + for _ in $(seq 1 5); do + CHECK_RES=$(curl --silent --write-out '%{http_code}' --output /dev/null ${GRAPHDB_URI}/rest/repositories) + if [ "${CHECK_RES}" = '200' ]; then + echo -e "\nUp and running" + break + fi + sleep 30s + echo "CHECK_RES: ${CHECK_RES}" + done +} + + +startGraphDB & +waitGraphDBStart +wait diff --git a/docs/api_reference/create_api_rst.py b/docs/api_reference/create_api_rst.py index 9413d90423e2f..65fbb6a4e61c9 100644 --- a/docs/api_reference/create_api_rst.py +++ b/docs/api_reference/create_api_rst.py @@ -14,7 +14,6 @@ ROOT_DIR = Path(__file__).parents[2].absolute() HERE = Path(__file__).parent - ClassKind = Literal["TypedDict", "Regular", "Pydantic", "enum"] @@ -323,30 +322,52 @@ def _package_dir(package_name: str = "langchain") -> Path: def _get_package_version(package_dir: Path) -> str: - with open(package_dir.parent / "pyproject.toml", "r") as f: - pyproject = toml.load(f) + """Return the version of the package.""" + try: + with open(package_dir.parent / "pyproject.toml", "r") as f: + pyproject = toml.load(f) + except FileNotFoundError as e: + print( + f"pyproject.toml not found in {package_dir.parent}.\n" + "You are either attempting to build a directory which is not a package or " + "the package is missing a pyproject.toml file which should be added." + "Aborting the build." + ) + exit(1) return pyproject["tool"]["poetry"]["version"] -def _out_file_path(package_name: str = "langchain") -> Path: +def _out_file_path(package_name: str) -> Path: """Return the path to the file containing the documentation.""" return HERE / f"{package_name.replace('-', '_')}_api_reference.rst" -def _doc_first_line(package_name: str = "langchain") -> str: +def _doc_first_line(package_name: str) -> str: """Return the path to the file containing the documentation.""" return f".. 
{package_name.replace('-', '_')}_api_reference:\n\n" def main() -> None: """Generate the api_reference.rst file for each package.""" + print("Starting to build API reference files.") for dir in os.listdir(ROOT_DIR / "libs"): + # Skip any hidden directories + # Some of these could be present by mistake in the code base + # e.g., .pytest_cache from running tests from the wrong location. + if dir.startswith("."): + print("Skipping dir:", dir) + continue + if dir in ("cli", "partners"): continue else: + print("Building package:", dir) _build_rst_file(package_name=dir) - for dir in os.listdir(ROOT_DIR / "libs" / "partners"): + partner_packages = os.listdir(ROOT_DIR / "libs" / "partners") + print("Building partner packages:", partner_packages) + for dir in partner_packages: _build_rst_file(package_name=dir) + print("API reference files built.") if __name__ == "__main__": diff --git a/docs/docs/contributing/documentation.mdx b/docs/docs/contributing/documentation.mdx index d2f2ef4cef717..a7468600b746b 100644 --- a/docs/docs/contributing/documentation.mdx +++ b/docs/docs/contributing/documentation.mdx @@ -17,7 +17,7 @@ Similar to linting, we recognize documentation can be annoying. If you do not wa ### Install dependencies - [Quarto](https://quarto.org) - package that converts Jupyter notebooks (`.ipynb` files) into mdx files for serving in Docusaurus. -- `poetry install` from the monorepo root +- `poetry install --with lint,docs --no-root` from the monorepo root. ### Building @@ -49,7 +49,6 @@ make api_docs_linkcheck The docs are linted from the monorepo root. To lint the docs, run the following from there: ```bash -poetry install --with lint,typing make lint ``` diff --git a/docs/docs/contributing/index.mdx b/docs/docs/contributing/index.mdx index 366664ed039f4..d25212f2862fe 100644 --- a/docs/docs/contributing/index.mdx +++ b/docs/docs/contributing/index.mdx @@ -15,8 +15,9 @@ There are many ways to contribute to LangChain. Here are some common ways people - [**Documentation**](./documentation.mdx): Help improve our docs, including this one! - [**Code**](./code.mdx): Help us write code, fix bugs, or improve our infrastructure. - [**Integrations**](integrations.mdx): Help us integrate with your favorite vendors and tools. +- [**Discussions**](https://github.com/langchain-ai/langchain/discussions): Help answer usage questions and discuss issues with users. -### 🚩GitHub Issues +### 🚩 GitHub Issues Our [issues](https://github.com/langchain-ai/langchain/issues) page is kept up to date with bugs, improvements, and feature requests. @@ -31,7 +32,13 @@ We will try to keep these issues as up-to-date as possible, though with the rapid rate of development in this field some may get out of date. If you notice this happening, please let us know. -### 🙋Getting Help +### 💭 GitHub Discussions + +We have a [discussions](https://github.com/langchain-ai/langchain/discussions) page where users can ask usage questions, discuss design decisions, and propose new features. + +If you are able to help answer questions, please do so! This will allow the maintainers to spend more time focused on development and bug fixing. + +### 🙋 Getting Help Our goal is to have the simplest developer setup possible. Should you experience any difficulty getting setup, please contact a maintainer! 
Not only do we want to help get you unblocked, but we also want to make sure that the process is diff --git a/docs/docs/contributing/repo_structure.mdx b/docs/docs/contributing/repo_structure.mdx new file mode 100644 index 0000000000000..90f212265c85e --- /dev/null +++ b/docs/docs/contributing/repo_structure.mdx @@ -0,0 +1,54 @@ +--- +sidebar_position: 0.5 +--- +# Repository Structure + +If you plan on contributing to LangChain code or documentation, it can be useful +to understand the high-level structure of the repository. + +LangChain is organized as a [monorepo](https://en.wikipedia.org/wiki/Monorepo) that contains multiple packages. + +Here's the structure visualized as a tree: + +```text +. +├── cookbook # Tutorials and examples +├── docs # Contains content for the documentation here: https://python.langchain.com/ +├── libs +│ ├── langchain # Main package +│ │ ├── tests/unit_tests # Unit tests (present in each package, not shown for brevity) +│ │ ├── tests/integration_tests # Integration tests (present in each package, not shown for brevity) +│ ├── langchain-community # Third-party integrations +│ ├── langchain-core # Base interfaces for key abstractions +│ ├── langchain-experimental # Experimental components and chains +│ ├── partners +│ ├── langchain-partner-1 +│ ├── langchain-partner-2 +│ ├── ... +│ +├── templates # A collection of easily deployable reference architectures for a wide variety of tasks. +``` + +The root directory also contains the following files: + +* `pyproject.toml`: Dependencies for building and linting the docs and cookbook. +* `Makefile`: Shortcuts for building and linting the docs and cookbook. + +There are other files at the root level, but their purpose should be self-explanatory. Feel free to browse around! + +## Documentation + +The `/docs` directory contains the content for the documentation that is shown +at https://python.langchain.com/ and the associated API Reference https://api.python.langchain.com/en/latest/langchain_api_reference.html. + +See the [documentation](./documentation) guidelines to learn how to contribute to the documentation. + +## Code + +The `/libs` directory contains the code for the LangChain packages. + +To learn more about how to contribute code, see the following guidelines: + +- [Code](./code.mdx): Learn how to develop in the LangChain codebase. +- [Integrations](./integrations.mdx): Learn how to contribute third-party integrations to langchain-community or start a new partner package. +- [Testing](./testing.mdx): Learn how to write tests for the packages. diff --git a/docs/docs/expression_language/how_to/message_history.ipynb b/docs/docs/expression_language/how_to/message_history.ipynb index 6d93423a96455..aac305d7ac94c 100644 --- a/docs/docs/expression_language/how_to/message_history.ipynb +++ b/docs/docs/expression_language/how_to/message_history.ipynb @@ -7,7 +7,7 @@ "source": [ "# Add message history (memory)\n", "\n", - "The `RunnableWithMessageHistory` let us add message history to certain types of chains.\n", + "The `RunnableWithMessageHistory` lets us add message history to certain types of chains. 
It wraps another Runnable and manages the chat message history for it.\n", "\n", "Specifically, it can be used for any Runnable that takes as input one of\n", "\n", @@ -21,195 +21,264 @@ "* a sequence of `BaseMessage`\n", "* a dict with a key that contains a sequence of `BaseMessage`\n", "\n", - "Let's take a look at some examples to see how it works." + "Let's take a look at some examples to see how it works. First we construct a runnable (which here accepts a dict as input and returns a message as output):" ] }, { - "cell_type": "markdown", - "id": "6bca45e5-35d9-4603-9ca9-6ac0ce0e35cd", + "cell_type": "code", + "execution_count": 1, + "id": "2ed413b4-33a1-48ee-89b0-2d4917ec101a", "metadata": {}, + "outputs": [], "source": [ - "## Setup\n", + "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n", + "from langchain_openai.chat_models import ChatOpenAI\n", "\n", - "We'll use Redis to store our chat message histories and Anthropic's claude-2 model so we'll need to install the following dependencies:" + "model = ChatOpenAI()\n", + "prompt = ChatPromptTemplate.from_messages(\n", + " [\n", + " (\n", + " \"system\",\n", + " \"You're an assistant who's good at {ability}. Respond in 20 words or fewer\",\n", + " ),\n", + " MessagesPlaceholder(variable_name=\"history\"),\n", + " (\"human\", \"{input}\"),\n", + " ]\n", + ")\n", + "runnable = prompt | model" ] }, { - "cell_type": "code", - "execution_count": null, - "id": "477d04b3-c2b6-4ba5-962f-492c0d625cd5", + "cell_type": "markdown", + "id": "9fd175e1-c7b8-4929-a57e-3331865fe7aa", "metadata": {}, - "outputs": [], "source": [ - "%pip install --upgrade --quiet langchain redis anthropic" + "To manage the message history, we will need:\n", + "1. This runnable;\n", + "2. A callable that returns an instance of `BaseChatMessageHistory`.\n", + "\n", + "Check out the [memory integrations](https://integrations.langchain.com/memory) page for implementations of chat message histories using Redis and other providers. Here we demonstrate using an in-memory `ChatMessageHistory` as well as more persistent storage using `RedisChatMessageHistory`." ] }, { "cell_type": "markdown", - "id": "93776323-d6b8-4912-bb6a-867c5e655f46", + "id": "3d83adad-9672-496d-9f25-5747e7b8c8bb", "metadata": {}, "source": [ - "Set your [Anthropic API key](https://console.anthropic.com/):" + "## In-memory\n", + "\n", + "Below we show a simple example in which the chat history lives in memory, in this case via a global Python dict.\n", + "\n", + "We construct a callable `get_session_history` that references this dict to return an instance of `ChatMessageHistory`. The arguments to the callable can be specified by passing a configuration to the `RunnableWithMessageHistory` at runtime. By default, the configuration parameter is expected to be a single string `session_id`. 
This can be adjusted via the `history_factory_config` kwarg.\n", + "\n", + "Using the single-parameter default:" ] }, { "cell_type": "code", - "execution_count": null, - "id": "c7f56f69-d2f1-4a21-990c-b5551eb012fa", + "execution_count": 2, + "id": "54348d02-d8ee-440c-bbf9-41bc0fbbc46c", "metadata": {}, "outputs": [], "source": [ - "import getpass\n", - "import os\n", + "from langchain_community.chat_message_histories import ChatMessageHistory\n", + "from langchain_core.chat_history import BaseChatMessageHistory\n", + "from langchain_core.runnables.history import RunnableWithMessageHistory\n", + "\n", + "store = {}\n", "\n", - "os.environ[\"ANTHROPIC_API_KEY\"] = getpass.getpass()" + "\n", + "def get_session_history(session_id: str) -> BaseChatMessageHistory:\n", + " if session_id not in store:\n", + " store[session_id] = ChatMessageHistory()\n", + " return store[session_id]\n", + "\n", + "\n", + "with_message_history = RunnableWithMessageHistory(\n", + " runnable,\n", + " get_session_history,\n", + " input_messages_key=\"input\",\n", + " history_messages_key=\"history\",\n", + ")" ] }, { "cell_type": "markdown", - "id": "6a0ec9e0-7b1c-4c6f-b570-e61d520b47c6", + "id": "01acb505-3fd3-4ab4-9f04-5ea07e81542e", "metadata": {}, "source": [ - "Start a local Redis Stack server if we don't have an existing Redis deployment to connect to:\n", - "```bash\n", - "docker run -d -p 6379:6379 -p 8001:8001 redis/redis-stack:latest\n", - "```" + "Note that we've specified `input_messages_key` (the key to be treated as the latest input message) and `history_messages_key` (the key to add historical messages to).\n", + "\n", + "When invoking this new runnable, we specify the corresponding chat history via a configuration parameter:" ] }, { "cell_type": "code", - "execution_count": 1, - "id": "cd6a250e-17fe-4368-a39d-1fe6b2cbde68", + "execution_count": 3, + "id": "01384412-f08e-4634-9edb-3f46f475b582", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "AIMessage(content='Cosine is a trigonometric function that calculates the ratio of the adjacent side to the hypotenuse of a right triangle.')" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "REDIS_URL = \"redis://localhost:6379/0\"" + "with_message_history.invoke(\n", + " {\"ability\": \"math\", \"input\": \"What does cosine mean?\"},\n", + " config={\"configurable\": {\"session_id\": \"abc123\"}},\n", + ")" ] }, { - "cell_type": "markdown", - "id": "36f43b87-655c-4f64-aa7b-bd8c1955d8e5", + "cell_type": "code", + "execution_count": 4, + "id": "954688a2-9a3f-47ee-a9e8-fa0c83e69477", "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "AIMessage(content='Cosine is a mathematical function used to calculate the length of a side in a right triangle.')" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "### [LangSmith](/docs/langsmith)\n", - "\n", - "LangSmith is especially useful for something like message history injection, where it can be hard to otherwise understand what the inputs are to various parts of the chain.\n", - "\n", - "Note that LangSmith is not needed, but it is helpful.\n", - "If you do want to use LangSmith, after you sign up at the link above, make sure to uncoment the below and set your environment variables to start logging traces:" + "# Remembers\n", + "with_message_history.invoke(\n", + " {\"ability\": \"math\", \"input\": \"What?\"},\n", + " config={\"configurable\": 
{\"session_id\": \"abc123\"}},\n", + ")" ] }, { "cell_type": "code", - "execution_count": 2, - "id": "2afc1556-8da1-4499-ba11-983b66c58b18", + "execution_count": 5, + "id": "39350d7c-2641-4744-bc2a-fd6a57c4ea90", "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "AIMessage(content='I can help with math problems. What do you need assistance with?')" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "# os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"\n", - "# os.environ[\"LANGCHAIN_API_KEY\"] = getpass.getpass()" + "# New session_id --> does not remember.\n", + "with_message_history.invoke(\n", + " {\"ability\": \"math\", \"input\": \"What?\"},\n", + " config={\"configurable\": {\"session_id\": \"def234\"}},\n", + ")" ] }, { "cell_type": "markdown", - "id": "1a5a632e-ba9e-4488-b586-640ad5494f62", + "id": "d29497be-3366-408d-bbb9-d4a8bf4ef37c", "metadata": {}, "source": [ - "## Example: Dict input, message output\n", - "\n", - "Let's create a simple chain that takes a dict as input and returns a BaseMessage.\n", - "\n", - "In this case the `\"question\"` key in the input represents our input message, and the `\"history\"` key is where our historical messages will be injected." + "The configuration parameters by which we track message histories can be customized by passing in a list of ``ConfigurableFieldSpec`` objects to the ``history_factory_config`` parameter. Below, we use two parameters: a `user_id` and `conversation_id`." ] }, { "cell_type": "code", - "execution_count": 2, - "id": "2a150d6f-8878-4950-8634-a608c5faad56", + "execution_count": 6, + "id": "1c89daee-deff-4fdf-86a3-178f7d8ef536", "metadata": {}, "outputs": [], "source": [ - "from typing import Optional\n", + "from langchain_core.runnables import ConfigurableFieldSpec\n", "\n", - "from langchain_community.chat_message_histories import RedisChatMessageHistory\n", - "from langchain_community.chat_models import ChatAnthropic\n", - "from langchain_core.chat_history import BaseChatMessageHistory\n", - "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n", - "from langchain_core.runnables.history import RunnableWithMessageHistory" + "store = {}\n", + "\n", + "\n", + "def get_session_history(user_id: str, conversation_id: str) -> BaseChatMessageHistory:\n", + " if (user_id, conversation_id) not in store:\n", + " store[(user_id, conversation_id)] = ChatMessageHistory()\n", + " return store[(user_id, conversation_id)]\n", + "\n", + "\n", + "with_message_history = RunnableWithMessageHistory(\n", + " runnable,\n", + " get_session_history,\n", + " input_messages_key=\"input\",\n", + " history_messages_key=\"history\",\n", + " history_factory_config=[\n", + " ConfigurableFieldSpec(\n", + " id=\"user_id\",\n", + " annotation=str,\n", + " name=\"User ID\",\n", + " description=\"Unique identifier for the user.\",\n", + " default=\"\",\n", + " is_shared=True,\n", + " ),\n", + " ConfigurableFieldSpec(\n", + " id=\"conversation_id\",\n", + " annotation=str,\n", + " name=\"Conversation ID\",\n", + " description=\"Unique identifier for the conversation.\",\n", + " default=\"\",\n", + " is_shared=True,\n", + " ),\n", + " ],\n", + ")" ] }, { "cell_type": "code", - "execution_count": 3, - "id": "3185edba-4eb6-4b32-80c6-577c0d19af97", + "execution_count": null, + "id": "65c5622e-09b8-4f2f-8c8a-2dab0fd040fa", "metadata": {}, "outputs": [], "source": [ - "prompt = ChatPromptTemplate.from_messages(\n", - " [\n", - " (\"system\", \"You're an 
assistant who's good at {ability}\"),\n", - " MessagesPlaceholder(variable_name=\"history\"),\n", - " (\"human\", \"{question}\"),\n", - " ]\n", - ")\n", - "\n", - "chain = prompt | ChatAnthropic(model=\"claude-2\")" + "with_message_history.invoke(\n", + " {\"ability\": \"math\", \"input\": \"Hello\"},\n", + " config={\"configurable\": {\"user_id\": \"123\", \"conversation_id\": \"1\"}},\n", + ")" ] }, { "cell_type": "markdown", - "id": "f9d81796-ce61-484c-89e2-6c567d5e54ef", + "id": "18f1a459-3f88-4ee6-8542-76a907070dd6", "metadata": {}, "source": [ - "### Adding message history\n", + "### Examples with runnables of different signatures\n", "\n", - "To add message history to our original chain we wrap it in the `RunnableWithMessageHistory` class.\n", - "\n", - "Crucially, we also need to define a method that takes a session_id string and based on it returns a `BaseChatMessageHistory`. Given the same input, this method should return an equivalent output.\n", - "\n", - "In this case we'll also want to specify `input_messages_key` (the key to be treated as the latest input message) and `history_messages_key` (the key to add historical messages to)." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "ca7c64d8-e138-4ef8-9734-f82076c47d80", - "metadata": {}, - "outputs": [], - "source": [ - "chain_with_history = RunnableWithMessageHistory(\n", - " chain,\n", - " lambda session_id: RedisChatMessageHistory(session_id, url=REDIS_URL),\n", - " input_messages_key=\"question\",\n", - " history_messages_key=\"history\",\n", - ")" + "The above runnable takes a dict as input and returns a BaseMessage. Below we show some alternatives." ] }, { "cell_type": "markdown", - "id": "37eefdec-9901-4650-b64c-d3c097ed5f4d", + "id": "48eae1bf-b59d-4a61-8e62-b6dbf667e866", "metadata": {}, "source": [ - "## Invoking with config\n", - "\n", - "Whenever we call our chain with message history, we need to include a config that contains the `session_id`\n", - "```python\n", - "config={\"configurable\": {\"session_id\": \"\"}}\n", - "```\n", - "\n", - "Given the same configuration, our chain should be pulling from the same chat message history." + "#### Messages input, dict output" ] }, { "cell_type": "code", "execution_count": 7, - "id": "a85bcc22-ca4c-4ad5-9440-f94be7318f3e", + "id": "17733d4f-3a32-4055-9d44-5d58b9446a26", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "AIMessage(content=' Cosine is one of the basic trigonometric functions in mathematics. It is defined as the ratio of the adjacent side to the hypotenuse in a right triangle.\\n\\nSome key properties and facts about cosine:\\n\\n- It is denoted by cos(ฮธ), where ฮธ is the angle in a right triangle. \\n\\n- The cosine of an acute angle is always positive. For angles greater than 90 degrees, cosine can be negative.\\n\\n- Cosine is one of the three main trig functions along with sine and tangent.\\n\\n- The cosine of 0 degrees is 1. 
As the angle increases towards 90 degrees, the cosine value decreases towards 0.\\n\\n- The range of values for cosine is -1 to 1.\\n\\n- The cosine function maps angles in a circle to the x-coordinate on the unit circle.\\n\\n- Cosine is used to find adjacent side lengths in right triangles, and has many other applications in mathematics, physics, engineering and more.\\n\\n- Key cosine identities include: cos(A+B) = cosAcosB โˆ’ sinAsinB and cos(2A) = cos^2(A) โˆ’ sin^2(A)\\n\\nSo in summary, cosine is a fundamental trig')" + "{'output_message': AIMessage(content=\"Simone de Beauvoir believed in the existence of free will. She argued that individuals have the ability to make choices and determine their own actions, even in the face of social and cultural constraints. She rejected the idea that individuals are purely products of their environment or predetermined by biology or destiny. Instead, she emphasized the importance of personal responsibility and the need for individuals to actively engage in creating their own lives and defining their own existence. De Beauvoir believed that freedom and agency come from recognizing one's own freedom and actively exercising it in the pursuit of personal and collective liberation.\")}" ] }, "execution_count": 7, @@ -218,22 +287,40 @@ } ], "source": [ - "chain_with_history.invoke(\n", - " {\"ability\": \"math\", \"question\": \"What does cosine mean?\"},\n", - " config={\"configurable\": {\"session_id\": \"foobar\"}},\n", + "from langchain_core.messages import HumanMessage\n", + "from langchain_core.runnables import RunnableParallel\n", + "\n", + "chain = RunnableParallel({\"output_message\": ChatOpenAI()})\n", + "\n", + "\n", + "def get_session_history(session_id: str) -> BaseChatMessageHistory:\n", + " if session_id not in store:\n", + " store[session_id] = ChatMessageHistory()\n", + " return store[session_id]\n", + "\n", + "\n", + "with_message_history = RunnableWithMessageHistory(\n", + " chain,\n", + " get_session_history,\n", + " output_messages_key=\"output_message\",\n", + ")\n", + "\n", + "with_message_history.invoke(\n", + " [HumanMessage(content=\"What did Simone de Beauvoir believe about free will\")],\n", + " config={\"configurable\": {\"session_id\": \"baz\"}},\n", ")" ] }, { "cell_type": "code", "execution_count": 8, - "id": "ab29abd3-751f-41ce-a1b0-53f6b565e79d", + "id": "efb57ef5-91f9-426b-84b9-b77f071a9dd7", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "AIMessage(content=' The inverse of the cosine function is called the arccosine or inverse cosine, often denoted as cos-1(x) or arccos(x).\\n\\nThe key properties and facts about arccosine:\\n\\n- It is defined as the angle ฮธ between 0 and ฯ€ radians whose cosine is x. So arccos(x) = ฮธ such that cos(ฮธ) = x.\\n\\n- The range of arccosine is 0 to ฯ€ radians (0 to 180 degrees).\\n\\n- The domain of arccosine is -1 to 1. \\n\\n- arccos(cos(ฮธ)) = ฮธ for values of ฮธ from 0 to ฯ€ radians.\\n\\n- arccos(x) is the angle in a right triangle whose adjacent side is x and hypotenuse is 1.\\n\\n- arccos(0) = 90 degrees. As x increases from 0 to 1, arccos(x) decreases from 90 to 0 degrees.\\n\\n- arccos(1) = 0 degrees. arccos(-1) = 180 degrees.\\n\\n- The graph of y = arccos(x) is part of the unit circle, restricted to x')" + "{'output_message': AIMessage(content='Simone de Beauvoir\\'s views on free will were closely aligned with those of her contemporary and partner Jean-Paul Sartre. 
Both de Beauvoir and Sartre were existentialist philosophers who emphasized the importance of individual freedom and the rejection of determinism. They believed that human beings have the capacity to transcend their circumstances and create their own meaning and values.\\n\\nSartre, in his famous work \"Being and Nothingness,\" argued that human beings are condemned to be free, meaning that we are burdened with the responsibility of making choices and defining ourselves in a world that lacks inherent meaning. Like de Beauvoir, Sartre believed that individuals have the ability to exercise their freedom and make choices in the face of external and internal constraints.\\n\\nWhile there may be some nuanced differences in their philosophical writings, overall, de Beauvoir and Sartre shared a similar belief in the existence of free will and the importance of individual agency in shaping one\\'s own life.')}" ] }, "execution_count": 8, @@ -242,149 +329,250 @@ } ], "source": [ - "chain_with_history.invoke(\n", - " {\"ability\": \"math\", \"question\": \"What's its inverse\"},\n", - " config={\"configurable\": {\"session_id\": \"foobar\"}},\n", + "with_message_history.invoke(\n", + " [HumanMessage(content=\"How did this compare to Sartre\")],\n", + " config={\"configurable\": {\"session_id\": \"baz\"}},\n", ")" ] }, { "cell_type": "markdown", - "id": "da3d1feb-b4bb-4624-961c-7db2e1180df7", + "id": "a39eac5f-a9d8-4729-be06-5e7faf0c424d", "metadata": {}, "source": [ - ":::tip\n", + "#### Messages input, messages output" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e45bcd95-e31f-4a9a-967a-78f96e8da881", + "metadata": {}, + "outputs": [], + "source": [ + "RunnableWithMessageHistory(\n", + " ChatOpenAI(),\n", + " get_session_history,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "04daa921-a2d1-40f9-8cd1-ae4e9a4163a7", + "metadata": {}, + "source": [ + "#### Dict with single key for all messages input, messages output" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "27157f15-9fb0-4167-9870-f4d7f234b3cb", + "metadata": {}, + "outputs": [], + "source": [ + "from operator import itemgetter\n", "\n", - "[Langsmith trace](https://smith.langchain.com/public/863a003b-7ca8-4b24-be9e-d63ec13c106e/r)\n", + "RunnableWithMessageHistory(\n", + " itemgetter(\"input_messages\") | ChatOpenAI(),\n", + " get_session_history,\n", + " input_messages_key=\"input_messages\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "418ca7af-9ed9-478c-8bca-cba0de2ca61e", + "metadata": {}, + "source": [ + "## Persistent storage" + ] + }, + { + "cell_type": "markdown", + "id": "76799a13-d99a-4c4f-91f2-db699e40b8df", + "metadata": {}, + "source": [ + "In many cases it is preferable to persist conversation histories. `RunnableWithMessageHistory` is agnostic as to how the `get_session_history` callable retrieves its chat message histories. See [here](https://github.com/langchain-ai/langserve/blob/main/examples/chat_with_persistence_and_user/server.py) for an example using a local filesystem. Below we demonstrate how one could use Redis. Check out the [memory integrations](https://integrations.langchain.com/memory) page for implementations of chat message histories using other providers." 
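As one concrete illustration (a sketch, not part of this notebook), a file-backed history can be swapped in by returning a `FileChatMessageHistory` from the session factory. The `runnable` below is the prompt-plus-model chain constructed earlier; the file naming scheme is an assumption made for the example.

```python
from langchain_community.chat_message_histories import FileChatMessageHistory
from langchain_core.runnables.history import RunnableWithMessageHistory


def get_session_history(session_id: str) -> FileChatMessageHistory:
    # One JSON file per session in the current working directory.
    return FileChatMessageHistory(f"history_{session_id}.json")


with_message_history = RunnableWithMessageHistory(
    runnable,
    get_session_history,
    input_messages_key="input",
    history_messages_key="history",
)
```

Everything else about the wrapped runnable stays the same; only the `get_session_history` callable changes.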
+ ] + }, + { + "cell_type": "markdown", + "id": "6bca45e5-35d9-4603-9ca9-6ac0ce0e35cd", + "metadata": {}, + "source": [ + "### Setup\n", "\n", - ":::" + "We'll need to install Redis if it's not installed already:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "477d04b3-c2b6-4ba5-962f-492c0d625cd5", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install --upgrade --quiet redis" ] }, { "cell_type": "markdown", - "id": "61d5115e-64a1-4ad5-b676-8afd4ef6093e", + "id": "6a0ec9e0-7b1c-4c6f-b570-e61d520b47c6", "metadata": {}, "source": [ - "Looking at the Langsmith trace for the second call, we can see that when constructing the prompt, a \"history\" variable has been injected which is a list of two messages (our first input and first output)." + "Start a local Redis Stack server if we don't have an existing Redis deployment to connect to:\n", + "```bash\n", + "docker run -d -p 6379:6379 -p 8001:8001 redis/redis-stack:latest\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "cd6a250e-17fe-4368-a39d-1fe6b2cbde68", + "metadata": {}, + "outputs": [], + "source": [ + "REDIS_URL = \"redis://localhost:6379/0\"" ] }, { "cell_type": "markdown", - "id": "028cf151-6cd5-4533-b3cf-c8d735554647", + "id": "36f43b87-655c-4f64-aa7b-bd8c1955d8e5", "metadata": {}, "source": [ - "## Example: messages input, dict output" + "### [LangSmith](/docs/langsmith)\n", + "\n", + "LangSmith is especially useful for something like message history injection, where it can be hard to otherwise understand what the inputs are to various parts of the chain.\n", + "\n", + "Note that LangSmith is not needed, but it is helpful.\n", + "If you do want to use LangSmith, after you sign up at the link above, make sure to uncoment the below and set your environment variables to start logging traces:" ] }, { "cell_type": "code", - "execution_count": 14, - "id": "0bb446b5-6251-45fe-a92a-4c6171473c53", + "execution_count": 2, + "id": "2afc1556-8da1-4499-ba11-983b66c58b18", + "metadata": {}, + "outputs": [], + "source": [ + "# os.environ[\"LANGCHAIN_TRACING_V2\"] = \"true\"\n", + "# os.environ[\"LANGCHAIN_API_KEY\"] = getpass.getpass()" + ] + }, + { + "cell_type": "markdown", + "id": "f9d81796-ce61-484c-89e2-6c567d5e54ef", + "metadata": {}, + "source": [ + "Updating the message history implementation just requires us to define a new callable, this time returning an instance of `RedisChatMessageHistory`:" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "ca7c64d8-e138-4ef8-9734-f82076c47d80", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_community.chat_message_histories import RedisChatMessageHistory\n", + "\n", + "\n", + "def get_message_history(session_id: str) -> RedisChatMessageHistory:\n", + " return RedisChatMessageHistory(session_id, url=REDIS_URL)\n", + "\n", + "\n", + "with_message_history = RunnableWithMessageHistory(\n", + " runnable,\n", + " get_message_history,\n", + " input_messages_key=\"input\",\n", + " history_messages_key=\"history\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "37eefdec-9901-4650-b64c-d3c097ed5f4d", + "metadata": {}, + "source": [ + "We can invoke as before:" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "a85bcc22-ca4c-4ad5-9440-f94be7318f3e", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "{'output_message': AIMessage(content=' Here is a summary of Simone de Beauvoir\\'s views on free will:\\n\\n- De Beauvoir was an existentialist philosopher and believed 
strongly in the concept of free will. She rejected the idea that human nature or instincts determine behavior.\\n\\n- Instead, de Beauvoir argued that human beings define their own essence or nature through their actions and choices. As she famously wrote, \"One is not born, but rather becomes, a woman.\"\\n\\n- De Beauvoir believed that while individuals are situated in certain cultural contexts and social conditions, they still have agency and the ability to transcend these situations. Freedom comes from choosing one\\'s attitude toward these constraints.\\n\\n- She emphasized the radical freedom and responsibility of the individual. We are \"condemned to be free\" because we cannot escape making choices and taking responsibility for our choices. \\n\\n- De Beauvoir felt that many people evade their freedom and responsibility by adopting rigid mindsets, ideologies, or conforming uncritically to social roles.\\n\\n- She advocated for the recognition of ambiguity in the human condition and warned against the quest for absolute rules that deny freedom and responsibility. Authentic living involves embracing ambiguity.\\n\\nIn summary, de Beauvoir promoted an existential ethics')}" + "AIMessage(content='Cosine is a trigonometric function that represents the ratio of the adjacent side to the hypotenuse in a right triangle.')" ] }, - "execution_count": 14, + "execution_count": 11, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "from langchain_core.messages import HumanMessage\n", - "from langchain_core.runnables import RunnableParallel\n", - "\n", - "chain = RunnableParallel({\"output_message\": ChatAnthropic(model=\"claude-2\")})\n", - "chain_with_history = RunnableWithMessageHistory(\n", - " chain,\n", - " lambda session_id: RedisChatMessageHistory(session_id, url=REDIS_URL),\n", - " output_messages_key=\"output_message\",\n", - ")\n", - "\n", - "chain_with_history.invoke(\n", - " [HumanMessage(content=\"What did Simone de Beauvoir believe about free will\")],\n", - " config={\"configurable\": {\"session_id\": \"baz\"}},\n", + "with_message_history.invoke(\n", + " {\"ability\": \"math\", \"input\": \"What does cosine mean?\"},\n", + " config={\"configurable\": {\"session_id\": \"foobar\"}},\n", ")" ] }, { "cell_type": "code", - "execution_count": 16, - "id": "601ce3ff-aea8-424d-8e54-fd614256af4f", + "execution_count": 12, + "id": "ab29abd3-751f-41ce-a1b0-53f6b565e79d", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "{'output_message': AIMessage(content=\" There are many similarities between Simone de Beauvoir's views on free will and those of Jean-Paul Sartre, though some key differences emerge as well:\\n\\nSimilarities with Sartre:\\n\\n- Both were existentialist thinkers who rejected determinism and emphasized human freedom and responsibility.\\n\\n- They agreed that existence precedes essence - there is no predefined human nature that determines who we are.\\n\\n- Individuals must define themselves through their choices and actions. This leads to anxiety but also freedom.\\n\\n- The human condition is characterized by ambiguity and uncertainty, rather than fixed meanings/values.\\n\\n- Both felt that most people evade their freedom through self-deception, conformity, or adopting collective identities/values uncritically.\\n\\nDifferences from Sartre: \\n\\n- Sartre placed more emphasis on the burden and anguish of radical freedom. 
De Beauvoir focused more on its positive potential.\\n\\n- De Beauvoir critiqued Sartre's premise that human relations are necessarily conflictual. She saw more potential for mutual recognition.\\n\\n- Sartre saw the Other's gaze as a threat to freedom. De Beauvoir put more stress on how the Other's gaze can confirm\")}" + "AIMessage(content='The inverse of cosine is the arccosine function, denoted as acos or cos^-1, which gives the angle corresponding to a given cosine value.')" ] }, - "execution_count": 16, + "execution_count": 12, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "chain_with_history.invoke(\n", - " [HumanMessage(content=\"How did this compare to Sartre\")],\n", - " config={\"configurable\": {\"session_id\": \"baz\"}},\n", + "with_message_history.invoke(\n", + " {\"ability\": \"math\", \"input\": \"What's its inverse\"},\n", + " config={\"configurable\": {\"session_id\": \"foobar\"}},\n", ")" ] }, { "cell_type": "markdown", - "id": "b898d1b1-11e6-4d30-a8dd-cc5e45533611", + "id": "da3d1feb-b4bb-4624-961c-7db2e1180df7", "metadata": {}, "source": [ ":::tip\n", "\n", - "[LangSmith trace](https://smith.langchain.com/public/f6c3e1d1-a49d-4955-a9fa-c6519df74fa7/r)\n", + "[Langsmith trace](https://smith.langchain.com/public/bd73e122-6ec1-48b2-82df-e6483dc9cb63/r)\n", "\n", ":::" ] }, { "cell_type": "markdown", - "id": "1724292c-01c6-44bb-83e8-9cdb6bf01483", - "metadata": {}, - "source": [ - "## More examples\n", - "\n", - "We could also do any of the below:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "fd89240b-5a25-48f8-9568-5c1127f9ffad", + "id": "61d5115e-64a1-4ad5-b676-8afd4ef6093e", "metadata": {}, - "outputs": [], "source": [ - "from operator import itemgetter\n", - "\n", - "# messages in, messages out\n", - "RunnableWithMessageHistory(\n", - " ChatAnthropic(model=\"claude-2\"),\n", - " lambda session_id: RedisChatMessageHistory(session_id, url=REDIS_URL),\n", - ")\n", - "\n", - "# dict with single key for all messages in, messages out\n", - "RunnableWithMessageHistory(\n", - " itemgetter(\"input_messages\") | ChatAnthropic(model=\"claude-2\"),\n", - " lambda session_id: RedisChatMessageHistory(session_id, url=REDIS_URL),\n", - " input_messages_key=\"input_messages\",\n", - ")" + "Looking at the Langsmith trace for the second call, we can see that when constructing the prompt, a \"history\" variable has been injected which is a list of two messages (our first input and first output)." ] } ], "metadata": { "kernelspec": { - "display_name": "poetry-venv", + "display_name": "Python 3 (ipykernel)", "language": "python", - "name": "poetry-venv" + "name": "python3" }, "language_info": { "codemirror_mode": { @@ -396,7 +584,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.1" + "version": "3.10.13" } }, "nbformat": 4, diff --git a/docs/docs/get_started/quickstart.mdx b/docs/docs/get_started/quickstart.mdx index f5d43e02a225a..d8a9f1e732e14 100644 --- a/docs/docs/get_started/quickstart.mdx +++ b/docs/docs/get_started/quickstart.mdx @@ -193,7 +193,7 @@ After that, we can import and use WebBaseLoader. 
```python from langchain_community.document_loaders import WebBaseLoader -loader = WebBaseLoader("https://docs.smith.langchain.com/overview") +loader = WebBaseLoader("https://docs.smith.langchain.com") docs = loader.load() ``` diff --git a/docs/docs/integrations/chat/ai21.ipynb b/docs/docs/integrations/chat/ai21.ipynb new file mode 100644 index 0000000000000..45f4a969bf508 --- /dev/null +++ b/docs/docs/integrations/chat/ai21.ipynb @@ -0,0 +1,141 @@ +{ + "cells": [ + { + "cell_type": "raw", + "id": "4cebeec0", + "metadata": {}, + "source": [ + "---\n", + "sidebar_label: AI21 Labs\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "e49f1e0d", + "metadata": {}, + "source": [ + "# ChatAI21\n", + "\n", + "This notebook covers how to get started with AI21 chat models.\n", + "\n", + "## Installation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4c3bef91", + "metadata": { + "ExecuteTime": { + "end_time": "2024-02-15T06:50:44.929635Z", + "start_time": "2024-02-15T06:50:41.209704Z" + } + }, + "outputs": [], + "source": [ + "!pip install -qU langchain-ai21" + ] + }, + { + "cell_type": "markdown", + "id": "2b4f3e15", + "metadata": {}, + "source": [ + "## Environment Setup\n", + "\n", + "We'll need to get a [AI21 API key](https://docs.ai21.com/) and set the `AI21_API_KEY` environment variable:\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "62e0dbc3", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "import os\n", + "from getpass import getpass\n", + "\n", + "os.environ[\"AI21_API_KEY\"] = getpass()" + ] + }, + { + "cell_type": "markdown", + "id": "4828829d3da430ce", + "metadata": { + "collapsed": false + }, + "source": [ + "## Usage" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "39353473fce5dd2e", + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "AIMessage(content='Bonjour, comment vas-tu?')" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from langchain_ai21 import ChatAI21\n", + "from langchain_core.prompts import ChatPromptTemplate\n", + "\n", + "chat = ChatAI21(model=\"j2-ultra\")\n", + "\n", + "prompt = ChatPromptTemplate.from_messages(\n", + " [\n", + " (\"system\", \"You are a helpful assistant that translates English to French.\"),\n", + " (\"human\", \"Translate this sentence from English to French. 
{english_text}.\"),\n", + " ]\n", + ")\n", + "\n", + "chain = prompt | chat\n", + "chain.invoke({\"english_text\": \"Hello, how are you?\"})" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c159a79f", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/integrations/chat/anthropic_functions.ipynb b/docs/docs/integrations/chat/anthropic_functions.ipynb index 5f6d116e10b49..6b2a031f4565f 100644 --- a/docs/docs/integrations/chat/anthropic_functions.ipynb +++ b/docs/docs/integrations/chat/anthropic_functions.ipynb @@ -15,16 +15,7 @@ "execution_count": 1, "id": "378be79b", "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/harrisonchase/.pyenv/versions/3.9.1/envs/langchain/lib/python3.9/site-packages/deeplake/util/check_latest_version.py:32: UserWarning: A newer version of deeplake (3.6.14) is available. It's recommended that you update to the latest version using `pip install -U deeplake`.\n", - " warnings.warn(\n" - ] - } - ], + "outputs": [], "source": [ "from langchain_experimental.llms.anthropic_functions import AnthropicFunctions" ] @@ -41,7 +32,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "id": "e1d535f6", "metadata": {}, "outputs": [], @@ -102,7 +93,7 @@ "metadata": {}, "outputs": [], "source": [ - "response = model.predict_messages(\n", + "response = model.invoke(\n", " [HumanMessage(content=\"whats the weater in boston?\")], functions=functions\n", ")" ] @@ -140,7 +131,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 4, "id": "7af5c567", "metadata": {}, "outputs": [], @@ -162,7 +153,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "id": "bd01082a", "metadata": {}, "outputs": [], @@ -172,24 +163,12 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "id": "b5a23e9f", "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[{'name': 'Alex', 'height': '5', 'hair_color': 'blonde'},\n", - " {'name': 'Claudia', 'height': '6', 'hair_color': 'brunette'}]" - ] - }, - "execution_count": 10, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ - "chain.run(inp)" + "chain.invoke(inp)" ] }, { @@ -256,7 +235,7 @@ } ], "source": [ - "chain.run(\"this is really cool\")" + "chain.invoke(\"this is really cool\")" ] } ], @@ -276,7 +255,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.1" + "version": "3.9.0" } }, "nbformat": 4, diff --git a/docs/docs/integrations/chat/google_generative_ai.ipynb b/docs/docs/integrations/chat/google_generative_ai.ipynb index c8544163a4f4c..f06b250755de6 100644 --- a/docs/docs/integrations/chat/google_generative_ai.ipynb +++ b/docs/docs/integrations/chat/google_generative_ai.ipynb @@ -320,20 +320,51 @@ "4. Message may be blocked if they violate the safety checks of the LLM. In this case, the model will return an empty response." 
] }, + { + "cell_type": "markdown", + "id": "54793b9e", + "metadata": {}, + "source": [ + "### Safety Settings\n", + "\n", + "Gemini models have default safety settings that can be overridden. If you are receiving lots of \"Safety Warnings\" from your models, you can try tweaking the `safety_settings` attribute of the model. For example, to turn off safety blocking for dangerous content, you can construct your LLM as follows:" + ] + }, { "cell_type": "code", "execution_count": null, "id": "75fdfad6", "metadata": {}, "outputs": [], - "source": [] + "source": [ + "from langchain_google_genai import (\n", + " ChatGoogleGenerativeAI,\n", + " HarmBlockThreshold,\n", + " HarmCategory,\n", + ")\n", + "\n", + "llm = ChatGoogleGenerativeAI(\n", + " model=\"gemini-pro\",\n", + " safety_settings={\n", + " HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE,\n", + " },\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "e68e203d", + "metadata": {}, + "source": [ + "For an enumeration of the categories and thresholds available, see Google's [safety setting types](https://ai.google.dev/api/python/google/generativeai/types/SafetySettingDict)." + ] }, { "cell_type": "markdown", "id": "92b5aca5", "metadata": {}, "source": [ - "## Additional Configuraation\n", + "## Additional Configuration\n", "\n", "You can pass the following parameters to ChatGoogleGenerativeAI in order to customize the SDK's behavior:\n", "\n", diff --git a/docs/docs/integrations/chat/yuan2.ipynb b/docs/docs/integrations/chat/yuan2.ipynb new file mode 100644 index 0000000000000..e3388fd8cb8ca --- /dev/null +++ b/docs/docs/integrations/chat/yuan2.ipynb @@ -0,0 +1,463 @@ +{ + "cells": [ + { + "cell_type": "raw", + "source": [ + "---\n", + "sidebar_label: YUAN2\n", + "---" + ], + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% raw\n" + } + } + }, + { + "cell_type": "markdown", + "metadata": { + "pycharm": { + "name": "#%% md\n" + } + }, + "source": [ + "# YUAN2.0\n", + "\n", + "This notebook shows how to use [YUAN2 API](https://github.com/IEIT-Yuan/Yuan-2.0/blob/main/docs/inference_server.md) in LangChain with the langchain.chat_models.ChatYuan2.\n", + "\n", + "[*Yuan2.0*](https://github.com/IEIT-Yuan/Yuan-2.0/blob/main/README-EN.md) is a new generation Fundamental Large Language Model developed by IEIT System. We have published all three models, Yuan 2.0-102B, Yuan 2.0-51B, and Yuan 2.0-2B. And we provide relevant scripts for pretraining, fine-tuning, and inference services for other developers. Yuan2.0 is based on Yuan1.0, utilizing a wider range of high-quality pre training data and instruction fine-tuning datasets to enhance the model's understanding of semantics, mathematics, reasoning, code, knowledge, and other aspects." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, + "pycharm": { + "name": "#%% md\n" + } + }, + "source": [ + "## Getting started\n", + "### Installation\n", + "First, Yuan2.0 provided an OpenAI compatible API, and we integrate ChatYuan2 into langchain chat model by using OpenAI client.\n", + "Therefore, ensure the openai package is installed in your Python environment. 
Run the following command:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "pycharm": { + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "%pip install --upgrade --quiet openai" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "pycharm": { + "name": "#%% md\n" + } + }, + "source": [ + "### Importing the Required Modules\n", + "After installation, import the necessary modules to your Python script:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "pycharm": { + "is_executing": true, + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "from langchain_community.chat_models import ChatYuan2\n", + "from langchain_core.messages import AIMessage, HumanMessage, SystemMessage" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "pycharm": { + "name": "#%% md\n" + } + }, + "source": [ + "### Setting Up Your API server\n", + "Setting up your OpenAI compatible API server following [yuan2 openai api server](https://github.com/IEIT-Yuan/Yuan-2.0/blob/main/README-EN.md).\n", + "If you deployed api server locally, you can simply set `api_key=\"EMPTY\"` or anything you want.\n", + "Just make sure, the `api_base` is set correctly." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "pycharm": { + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "yuan2_api_key = \"your_api_key\"\n", + "yuan2_api_base = \"http://127.0.0.1:8001/v1\"" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "pycharm": { + "name": "#%% md\n" + } + }, + "source": [ + "### Initialize the ChatYuan2 Model\n", + "Here's how to initialize the chat model:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "pycharm": { + "is_executing": true, + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "chat = ChatYuan2(\n", + " yuan2_api_base=\"http://127.0.0.1:8001/v1\",\n", + " temperature=1.0,\n", + " model_name=\"yuan2\",\n", + " max_retries=3,\n", + " streaming=False,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "pycharm": { + "name": "#%% md\n" + } + }, + "source": [ + "### Basic Usage\n", + "Invoke the model with system and human messages like this:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "pycharm": { + "name": "#%%\n" + }, + "scrolled": true + }, + "outputs": [], + "source": [ + "messages = [\n", + " SystemMessage(content=\"ไฝ ๆ˜ฏไธ€ไธชไบบๅทฅๆ™บ่ƒฝๅŠฉๆ‰‹ใ€‚\"),\n", + " HumanMessage(content=\"ไฝ ๅฅฝ๏ผŒไฝ ๆ˜ฏ่ฐ๏ผŸ\"),\n", + "]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "pycharm": { + "is_executing": true, + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "print(chat(messages))" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "pycharm": { + "name": "#%% md\n" + } + }, + "source": [ + "### Basic Usage with streaming\n", + "For continuous interaction, use the streaming feature:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, + "pycharm": { + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", + "\n", + "chat = ChatYuan2(\n", + " yuan2_api_base=\"http://127.0.0.1:8001/v1\",\n", + " temperature=1.0,\n", + " model_name=\"yuan2\",\n", + " max_retries=3,\n", + " streaming=True,\n", + " callbacks=[StreamingStdOutCallbackHandler()],\n", + ")\n", + "messages = 
[\n", + " SystemMessage(content=\"ไฝ ๆ˜ฏไธชๆ—…ๆธธๅฐๅŠฉๆ‰‹ใ€‚\"),\n", + " HumanMessage(content=\"็ป™ๆˆ‘ไป‹็ปไธ€ไธ‹ๅŒ—ไบฌๆœ‰ๅ“ชไบ›ๅฅฝ็Žฉ็š„ใ€‚\"),\n", + "]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, + "pycharm": { + "is_executing": true, + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "chat(messages)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, + "pycharm": { + "name": "#%% md\n" + } + }, + "source": [ + "## Advanced Features\n", + "### Usage with async calls\n", + "\n", + "Invoke the model with non-blocking calls, like this:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, + "pycharm": { + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "async def basic_agenerate():\n", + " chat = ChatYuan2(\n", + " yuan2_api_base=\"http://127.0.0.1:8001/v1\",\n", + " temperature=1.0,\n", + " model_name=\"yuan2\",\n", + " max_retries=3,\n", + " )\n", + " messages = [\n", + " [\n", + " SystemMessage(content=\"ไฝ ๆ˜ฏไธชๆ—…ๆธธๅฐๅŠฉๆ‰‹ใ€‚\"),\n", + " HumanMessage(content=\"็ป™ๆˆ‘ไป‹็ปไธ€ไธ‹ๅŒ—ไบฌๆœ‰ๅ“ชไบ›ๅฅฝ็Žฉ็š„ใ€‚\"),\n", + " ]\n", + " ]\n", + "\n", + " result = await chat.agenerate(messages)\n", + " print(result)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, + "pycharm": { + "is_executing": true, + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "import asyncio\n", + "\n", + "asyncio.run(basic_agenerate())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, + "pycharm": { + "name": "#%% md\n" + } + }, + "source": [ + "### Usage with prompt template\n", + "\n", + "Invoke the model with non-blocking calls and used chat template like this:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "pycharm": { + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "async def ainvoke_with_prompt_template():\n", + " from langchain.prompts.chat import (\n", + " ChatPromptTemplate,\n", + " )\n", + "\n", + " chat = ChatYuan2(\n", + " yuan2_api_base=\"http://127.0.0.1:8001/v1\",\n", + " temperature=1.0,\n", + " model_name=\"yuan2\",\n", + " max_retries=3,\n", + " )\n", + " prompt = ChatPromptTemplate.from_messages(\n", + " [\n", + " (\"system\", \"ไฝ ๆ˜ฏไธ€ไธช่ฏ—ไบบ๏ผŒๆ“…้•ฟๅ†™่ฏ—ใ€‚\"),\n", + " (\"human\", \"็ป™ๆˆ‘ๅ†™้ฆ–่ฏ—๏ผŒไธป้ข˜ๆ˜ฏ{theme}ใ€‚\"),\n", + " ]\n", + " )\n", + " chain = prompt | chat\n", + " result = await chain.ainvoke({\"theme\": \"ๆ˜Žๆœˆ\"})\n", + " print(f\"type(result): {type(result)}; {result}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "pycharm": { + "is_executing": true, + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "asyncio.run(ainvoke_with_prompt_template())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "pycharm": { + "name": "#%% md\n" + } + }, + "source": [ + "### Usage with async calls in streaming\n", + "For non-blocking calls with streaming output, use the astream method:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "pycharm": { + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "async def basic_astream():\n", + " chat = ChatYuan2(\n", + " 
yuan2_api_base=\"http://127.0.0.1:8001/v1\",\n", + " temperature=1.0,\n", + " model_name=\"yuan2\",\n", + " max_retries=3,\n", + " )\n", + " messages = [\n", + " SystemMessage(content=\"ไฝ ๆ˜ฏไธชๆ—…ๆธธๅฐๅŠฉๆ‰‹ใ€‚\"),\n", + " HumanMessage(content=\"็ป™ๆˆ‘ไป‹็ปไธ€ไธ‹ๅŒ—ไบฌๆœ‰ๅ“ชไบ›ๅฅฝ็Žฉ็š„ใ€‚\"),\n", + " ]\n", + " result = chat.astream(messages)\n", + " async for chunk in result:\n", + " print(chunk.content, end=\"\", flush=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "pycharm": { + "is_executing": true, + "name": "#%%\n" + }, + "scrolled": true + }, + "outputs": [], + "source": [ + "import asyncio\n", + "\n", + "asyncio.run(basic_astream())" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} \ No newline at end of file diff --git a/docs/docs/integrations/chat_loaders/langsmith_dataset.ipynb b/docs/docs/integrations/chat_loaders/langsmith_dataset.ipynb index 5b98b2cc96f3d..85586b03711a4 100644 --- a/docs/docs/integrations/chat_loaders/langsmith_dataset.ipynb +++ b/docs/docs/integrations/chat_loaders/langsmith_dataset.ipynb @@ -55,7 +55,7 @@ "source": [ "## 1. Select a dataset\n", "\n", - "This notebook fine-tunes a model directly on selecting which runs to fine-tune on. You will often curate these from traced runs. You can learn more about LangSmith datasets in the docs [docs](https://docs.smith.langchain.com/evaluation/datasets).\n", + "This notebook fine-tunes a model directly on selecting which runs to fine-tune on. You will often curate these from traced runs. You can learn more about LangSmith datasets in the docs [docs](https://docs.smith.langchain.com/evaluation/concepts#datasets).\n", "\n", "For the sake of this tutorial, we will upload an existing dataset here that you can use." ] diff --git a/docs/docs/integrations/document_loaders/athena.ipynb b/docs/docs/integrations/document_loaders/athena.ipynb new file mode 100644 index 0000000000000..3fd655c0f0a04 --- /dev/null +++ b/docs/docs/integrations/document_loaders/athena.ipynb @@ -0,0 +1,110 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "MwTWzDxYgbrR" + }, + "source": [ + "# Athena\n", + "\n", + "This notebooks goes over how to load documents from AWS Athena" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "F0zaLR3xgWmO" + }, + "outputs": [], + "source": [ + "! 
pip install boto3" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "076NLjfngoWJ" + }, + "outputs": [], + "source": [ + "from langchain_community.document_loaders.athena import AthenaLoader" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "XpMRQwU9gu44" + }, + "outputs": [], + "source": [ + "database_name = \"my_database\"\n", + "s3_output_path = \"s3://my_bucket/query_results/\"\n", + "query = \"SELECT * FROM my_table\"\n", + "profile_name = \"my_profile\"\n", + "\n", + "loader = AthenaLoader(\n", + " query=query,\n", + " database=database_name,\n", + " s3_output_uri=s3_output_path,\n", + " profile_name=profile_name,\n", + ")\n", + "\n", + "documents = loader.load()\n", + "print(documents)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "5IBapL3ejoEt" + }, + "source": [ + "Example with metadata columns" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "wMx6nI1qjryD" + }, + "outputs": [], + "source": [ + "database_name = \"my_database\"\n", + "s3_output_path = \"s3://my_bucket/query_results/\"\n", + "query = \"SELECT * FROM my_table\"\n", + "profile_name = \"my_profile\"\n", + "metadata_columns = [\"_row\", \"_created_at\"]\n", + "\n", + "loader = AthenaLoader(\n", + " query=query,\n", + " database=database_name,\n", + " s3_output_uri=s3_output_path,\n", + " profile_name=profile_name,\n", + " metadata_columns=metadata_columns,\n", + ")\n", + "\n", + "documents = loader.load()\n", + "print(documents)" + ] + } + ], + "metadata": { + "colab": { + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/docs/docs/integrations/document_loaders/cassandra.ipynb b/docs/docs/integrations/document_loaders/cassandra.ipynb index 49f261a18a84b..b69b1135a2bbe 100644 --- a/docs/docs/integrations/document_loaders/cassandra.ipynb +++ b/docs/docs/integrations/document_loaders/cassandra.ipynb @@ -72,57 +72,72 @@ }, { "cell_type": "markdown", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, "source": [ "### Init from a cassandra driver Session\n", "\n", "You need to create a `cassandra.cluster.Session` object, as described in the [Cassandra driver documentation](https://docs.datastax.com/en/developer/python-driver/latest/api/cassandra/cluster/#module-cassandra.cluster). The details vary (e.g. 
with network settings and authentication), but this might be something like:" - ], - "metadata": { - "collapsed": false - } + ] }, { "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, "outputs": [], "source": [ "from cassandra.cluster import Cluster\n", "\n", "cluster = Cluster()\n", "session = cluster.connect()" - ], - "metadata": { - "collapsed": false - }, - "execution_count": null + ] }, { "cell_type": "markdown", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, "source": [ "You need to provide the name of an existing keyspace of the Cassandra instance:" - ], - "metadata": { - "collapsed": false - } + ] }, { "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, "outputs": [], "source": [ "CASSANDRA_KEYSPACE = input(\"CASSANDRA_KEYSPACE = \")" - ], - "metadata": { - "collapsed": false - }, - "execution_count": null + ] }, { "cell_type": "markdown", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, "source": [ "Creating the document loader:" - ], - "metadata": { - "collapsed": false - } + ] }, { "cell_type": "code", @@ -144,18 +159,21 @@ }, { "cell_type": "code", - "outputs": [], - "source": [ - "docs = loader.load()" - ], + "execution_count": 17, "metadata": { - "collapsed": false, "ExecuteTime": { "end_time": "2024-01-19T15:47:26.399472Z", "start_time": "2024-01-19T15:47:26.389145Z" + }, + "collapsed": false, + "jupyter": { + "outputs_hidden": false } }, - "execution_count": 17 + "outputs": [], + "source": [ + "docs = loader.load()" + ] }, { "cell_type": "code", @@ -169,7 +187,9 @@ "outputs": [ { "data": { - "text/plain": "Document(page_content='Row(_id=\\'659bdffa16cbc4586b11a423\\', title=\\'Dangerous Men\\', reviewtext=\\'\"Dangerous Men,\" the picture\\\\\\'s production notes inform, took 26 years to reach the big screen. After having seen it, I wonder: What was the rush?\\')', metadata={'table': 'movie_reviews', 'keyspace': 'default_keyspace'})" + "text/plain": [ + "Document(page_content='Row(_id=\\'659bdffa16cbc4586b11a423\\', title=\\'Dangerous Men\\', reviewtext=\\'\"Dangerous Men,\" the picture\\\\\\'s production notes inform, took 26 years to reach the big screen. After having seen it, I wonder: What was the rush?\\')', metadata={'table': 'movie_reviews', 'keyspace': 'default_keyspace'})" + ] }, "execution_count": 19, "metadata": {}, @@ -182,17 +202,27 @@ }, { "cell_type": "markdown", + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, "source": [ "### Init from cassio\n", "\n", "It's also possible to use cassio to configure the session and keyspace." - ], - "metadata": { - "collapsed": false - } + ] }, { "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, "outputs": [], "source": [ "import cassio\n", @@ -204,11 +234,16 @@ ")\n", "\n", "docs = loader.load()" - ], - "metadata": { - "collapsed": false - }, - "execution_count": null + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Attribution statement\n", + "\n", + "> Apache Cassandra, Cassandra and Apache are either registered trademarks or trademarks of the [Apache Software Foundation](http://www.apache.org/) in the United States and/or other countries." 
+ ] } ], "metadata": { @@ -233,7 +268,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.18" + "version": "3.9.17" } }, "nbformat": 4, diff --git a/docs/docs/integrations/document_loaders/pebblo.ipynb b/docs/docs/integrations/document_loaders/pebblo.ipynb new file mode 100644 index 0000000000000..40aa7ee6b0c93 --- /dev/null +++ b/docs/docs/integrations/document_loaders/pebblo.ipynb @@ -0,0 +1,88 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Pebblo Safe DocumentLoader\n", + "\n", + "> [Pebblo](https://github.com/daxa-ai/pebblo) enables developers to safely load data and promote their Gen AI app to deployment without worrying about the organization's compliance and security requirements. The project identifies semantic topics and entities found in the loaded data and summarizes them on the UI or a PDF report.\n", + "\n", + "Pebblo has two components.\n", + "\n", + "1. Pebblo Safe DocumentLoader for Langchain\n", + "1. Pebblo Daemon\n", + "\n", + "This document describes how to augment your existing Langchain DocumentLoader with Pebblo Safe DocumentLoader to get deep data visibility on the types of Topics and Entities ingested into the Gen-AI Langchain application. For details on `Pebblo Daemon` see this [pebblo daemon](https://daxa-ai.github.io/pebblo-docs/daemon.html) document.\n", + "\n", + "Pebblo SafeLoader enables safe data ingestion for Langchain `DocumentLoader`. This is done by wrapping the document loader call with `Pebblo Safe DocumentLoader`.\n", + "\n", + "#### How to Pebblo-enable Document Loading?\n", + "\n", + "Assume a Langchain RAG application snippet that uses `CSVLoader` to read a CSV document for inference.\n", + "\n", + "Here is the document loading snippet using `CSVLoader`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain.document_loaders.csv_loader import CSVLoader\n", + "\n", + "loader = CSVLoader(\"data/corp_sens_data.csv\")\n", + "documents = loader.load()\n", + "print(documents)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The Pebblo SafeLoader can be enabled with a few lines of code changes to the above snippet."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain.document_loaders.csv_loader import CSVLoader\n", + "from langchain_community.document_loaders import PebbloSafeLoader\n", + "\n", + "loader = PebbloSafeLoader(\n", + " CSVLoader(\"data/corp_sens_data.csv\"),\n", + " name=\"acme-corp-rag-1\", # App name (Mandatory)\n", + " owner=\"Joe Smith\", # Owner (Optional)\n", + " description=\"Support productivity RAG application\", # Description (Optional)\n", + ")\n", + "documents = loader.load()\n", + "print(documents)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.13" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/docs/integrations/document_loaders/sitemap.ipynb b/docs/docs/integrations/document_loaders/sitemap.ipynb index 9fe9d8e8e6e66..d813f6e52ab51 100644 --- a/docs/docs/integrations/document_loaders/sitemap.ipynb +++ b/docs/docs/integrations/document_loaders/sitemap.ipynb @@ -13,27 +13,16 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Requirement already satisfied: nest_asyncio in /Users/tasp/Code/projects/langchain/.venv/lib/python3.10/site-packages (1.5.6)\n", - "\n", - "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip available: \u001b[0m\u001b[31;49m22.3.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m23.0.1\u001b[0m\n", - "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n" - ] - } - ], + "outputs": [], "source": [ "%pip install --upgrade --quiet nest_asyncio" ] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 13, "metadata": {}, "outputs": [], "source": [ @@ -54,11 +43,11 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ - "sitemap_loader = SitemapLoader(web_path=\"https://langchain.readthedocs.io/sitemap.xml\")\n", + "sitemap_loader = SitemapLoader(web_path=\"https://api.python.langchain.com/sitemap.xml\")\n", "\n", "docs = sitemap_loader.load()" ] @@ -90,7 +79,7 @@ { "data": { "text/plain": [ - "Document(page_content='\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nLangChain Python API Reference Documentation.\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nYou will be automatically redirected to the new location of this page.\\n\\n', metadata={'source': 'https://api.python.langchain.com/en/stable/', 'loc': 'https://api.python.langchain.com/en/stable/', 'lastmod': '2023-10-13T18:13:26.966937+00:00', 'changefreq': 'weekly', 'priority': '1'})" + "Document(page_content='\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nLangChain Python API Reference Documentation.\\n\\n\\nYou will be automatically redirected to the new location of this page.\\n\\n', metadata={'source': 'https://api.python.langchain.com/en/stable/', 'loc': 'https://api.python.langchain.com/en/stable/', 'lastmod': '2024-02-09T01:10:49.422114+00:00', 'changefreq': 'weekly', 'priority': '1'})" ] }, "execution_count": 6, @@ -113,20 +102,12 @@ }, { "cell_type": "code", - 
"execution_count": 27, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Fetching pages: 100%|##########| 1/1 [00:00<00:00, 16.39it/s]\n" - ] - } - ], + "outputs": [], "source": [ "loader = SitemapLoader(\n", - " web_path=\"https://langchain.readthedocs.io/sitemap.xml\",\n", + " web_path=\" https://api.python.langchain.com/sitemap.xml\",\n", " filter_urls=[\"https://api.python.langchain.com/en/latest\"],\n", ")\n", "documents = loader.load()" @@ -134,7 +115,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 8, "metadata": { "scrolled": true }, @@ -142,10 +123,10 @@ { "data": { "text/plain": [ - "Document(page_content='\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nLangChain Python API Reference Documentation.\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nYou will be automatically redirected to the new location of this page.\\n\\n', metadata={'source': 'https://api.python.langchain.com/en/latest/', 'loc': 'https://api.python.langchain.com/en/latest/', 'lastmod': '2023-10-13T18:09:58.478681+00:00', 'changefreq': 'daily', 'priority': '0.9'})" + "Document(page_content='\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nLangChain Python API Reference Documentation.\\n\\n\\nYou will be automatically redirected to the new location of this page.\\n\\n', metadata={'source': 'https://api.python.langchain.com/en/latest/', 'loc': 'https://api.python.langchain.com/en/latest/', 'lastmod': '2024-02-12T05:26:10.971077+00:00', 'changefreq': 'daily', 'priority': '0.9'})" ] }, - "execution_count": 28, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } @@ -183,7 +164,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 10, "metadata": {}, "outputs": [], "source": [ @@ -211,12 +192,12 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 11, "metadata": {}, "outputs": [], "source": [ "loader = SitemapLoader(\n", - " \"https://langchain.readthedocs.io/sitemap.xml\",\n", + " \"https://api.python.langchain.com/sitemap.xml\",\n", " filter_urls=[\"https://api.python.langchain.com/en/latest/\"],\n", " parsing_function=remove_nav_and_header_elements,\n", ")" @@ -233,17 +214,9 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Fetching pages: 100%|##########| 3/3 [00:00<00:00, 12.46it/s]\n" - ] - } - ], + "outputs": [], "source": [ "sitemap_loader = SitemapLoader(web_path=\"example_data/sitemap.xml\", is_local=True)\n", "\n", diff --git a/docs/docs/integrations/document_loaders/source_code.ipynb b/docs/docs/integrations/document_loaders/source_code.ipynb index cbf9679ec707e..c6a75afde81bc 100644 --- a/docs/docs/integrations/document_loaders/source_code.ipynb +++ b/docs/docs/integrations/document_loaders/source_code.ipynb @@ -9,7 +9,35 @@ "\n", "This notebook covers how to load source code files using a special approach with language parsing: each top-level function and class in the code is loaded into separate documents. Any remaining code top-level code outside the already loaded functions and classes will be loaded into a separate document.\n", "\n", - "This approach can potentially improve the accuracy of QA models over source code. Currently, the supported languages for code parsing are Python and JavaScript. The language used for parsing can be configured, along with the minimum number of lines required to activate the splitting based on syntax." 
+ "This approach can potentially improve the accuracy of QA models over source code.\n", + "\n", + "The supported languages for code parsing are:\n", + "\n", + "- C (*)\n", + "- C++ (*)\n", + "- C# (*)\n", + "- COBOL\n", + "- Go (*)\n", + "- Java (*)\n", + "- JavaScript (requires package `esprima`)\n", + "- Kotlin (*)\n", + "- Lua (*)\n", + "- Perl (*)\n", + "- Python\n", + "- Ruby (*)\n", + "- Rust (*)\n", + "- Scala (*)\n", + "- TypeScript (*)\n", + "\n", + "Items marked with (*) require the packages `tree_sitter` and `tree_sitter_languages`.\n", + "It is straightforward to add support for additional languages using `tree_sitter`,\n", + "although this currently requires modifying LangChain.\n", + "\n", + "The language used for parsing can be configured, along with the minimum number of\n", + "lines required to activate the splitting based on syntax.\n", + "\n", + "If a language is not explicitly specified, `LanguageParser` will infer one from\n", + "filename extensions, if present." ] }, { @@ -19,7 +47,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install --upgrade --quiet esprima" + "%pip install -qU esprima esprima tree_sitter tree_sitter_languages" ] }, { @@ -395,6 +423,33 @@ "source": [ "print(\"\\n\\n--8<--\\n\\n\".join([document.page_content for document in result]))" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Adding Languages using Tree-sitter Template\n", + "\n", + "Expanding language support using the Tree-Sitter template involves a few essential steps:\n", + "\n", + "1. **Creating a New Language File**:\n", + " - Begin by creating a new file in the designated directory (langchain/libs/community/langchain_community/document_loaders/parsers/language).\n", + " - Model this file based on the structure and parsing logic of existing language files like **`cpp.py`**.\n", + " - You will also need to create a file in the langchain directory (langchain/libs/langchain/langchain/document_loaders/parsers/language).\n", + "2. **Parsing Language Specifics**:\n", + " - Mimic the structure used in the **`cpp.py`** file, adapting it to suit the language you are incorporating.\n", + " - The primary alteration involves adjusting the chunk query array to suit the syntax and structure of the language you are parsing.\n", + "3. **Testing the Language Parser**:\n", + " - For thorough validation, generate a test file specific to the new language. Create **`test_language.py`** in the designated directory(langchain/libs/community/tests/unit_tests/document_loaders/parsers/language).\n", + " - Follow the example set by **`test_cpp.py`** to establish fundamental tests for the parsed elements in the new language.\n", + "4. **Integration into the Parser and Text Splitter**:\n", + " - Incorporate your new language within the **`language_parser.py`** file. Ensure to update LANGUAGE_EXTENSIONS and LANGUAGE_SEGMENTERS along with the docstring for LanguageParser to recognize and handle the added language.\n", + " - Also, confirm that your language is included in **`text_splitter.py`** in class Language for proper parsing.\n", + "\n", + "By following these steps and ensuring comprehensive testing and integration, you'll successfully extend language support using the Tree-Sitter template.\n", + "\n", + "Best of luck!" 
+ ] } ], "metadata": { @@ -413,7 +468,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.16" + "version": "3.11.5" } }, "nbformat": 4, diff --git a/docs/docs/integrations/llms/ai21.ipynb b/docs/docs/integrations/llms/ai21.ipynb index 2e22f85f11d59..29e698e7d8fe1 100644 --- a/docs/docs/integrations/llms/ai21.ipynb +++ b/docs/docs/integrations/llms/ai21.ipynb @@ -1,137 +1,114 @@ { "cells": [ { - "cell_type": "markdown", - "id": "9597802c", + "cell_type": "raw", + "id": "602a52a4", "metadata": {}, "source": [ - "# AI21\n", - "\n", - "[AI21 Studio](https://docs.ai21.com/) provides API access to `Jurassic-2` large language models.\n", - "\n", - "This example goes over how to use LangChain to interact with [AI21 models](https://docs.ai21.com/docs/jurassic-2-models)." + "---\n", + "sidebar_label: AI21 Labs\n", + "---" ] }, { - "cell_type": "code", - "execution_count": 1, - "id": "02be122d-04e8-4ec6-84d1-f1d8961d6828", - "metadata": { - "tags": [] - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\u001b[33mWARNING: There was an error checking the latest version of pip.\u001b[0m\u001b[33m\n", - "\u001b[0mNote: you may need to restart the kernel to use updated packages.\n" - ] - } - ], - "source": [ - "# install the package:\n", - "%pip install --upgrade --quiet ai21" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "4229227e-6ca2-41ad-a3c3-5f29e3559091", - "metadata": { - "tags": [] - }, - "outputs": [], + "cell_type": "markdown", + "id": "9597802c", + "metadata": {}, "source": [ - "# get AI21_API_KEY. Use https://studio.ai21.com/account/account\n", + "# AI21LLM\n", "\n", - "from getpass import getpass\n", + "This example goes over how to use LangChain to interact with `AI21` models.\n", "\n", - "AI21_API_KEY = getpass()" + "## Installation" ] }, { "cell_type": "code", - "execution_count": 7, - "id": "6fb585dd", - "metadata": { - "tags": [] - }, + "execution_count": null, + "id": "59c710c4", + "metadata": {}, "outputs": [], "source": [ - "from langchain_community.llms import AI21\n", - "from langchain_core.prompts import PromptTemplate" + "!pip install -qU langchain-ai21" ] }, { - "cell_type": "code", - "execution_count": 12, - "id": "035dea0f", + "cell_type": "markdown", + "id": "560a2f9254963fd7", "metadata": { - "tags": [] + "collapsed": false }, - "outputs": [], "source": [ - "template = \"\"\"Question: {question}\n", - "\n", - "Answer: Let's think step by step.\"\"\"\n", + "## Environment Setup\n", "\n", - "prompt = PromptTemplate.from_template(template)" + "We'll need to get a [AI21 API key](https://docs.ai21.com/) and set the `AI21_API_KEY` environment variable:" ] }, { "cell_type": "code", - "execution_count": 9, - "id": "3f3458d9", + "execution_count": 4, + "id": "035dea0f", "metadata": { "tags": [] }, "outputs": [], "source": [ - "llm = AI21(ai21_api_key=AI21_API_KEY)" + "import os\n", + "from getpass import getpass\n", + "\n", + "os.environ[\"AI21_API_KEY\"] = getpass()" ] }, { - "cell_type": "code", - "execution_count": 10, - "id": "a641dbd9", + "cell_type": "markdown", + "id": "1891df96eb076e1a", "metadata": { - "tags": [] + "collapsed": false }, - "outputs": [], "source": [ - "llm_chain = prompt | llm" + "## Usage" ] }, { "cell_type": "code", - "execution_count": 13, - "id": "9f0b1960", + "execution_count": 6, + "id": "98f70927a87e4745", "metadata": { - "tags": [] + "collapsed": false }, "outputs": [ { "data": { "text/plain": [ - "'\\nThe Super Bowl in the year Justin Beiber was 
born was in the year 1991.\\nThe Super Bowl in 1991 was won by the Washington Redskins.\\nFinal answer: Washington Redskins'" + "'\\nLangChain is a decentralized blockchain network that leverages AI and machine learning to provide language translation services.'" ] }, - "execution_count": 13, + "execution_count": 6, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "question = \"What NFL team won the Super Bowl in the year Justin Beiber was born?\"\n", + "from langchain_ai21 import AI21LLM\n", + "from langchain_core.prompts import PromptTemplate\n", + "\n", + "template = \"\"\"Question: {question}\n", "\n", - "llm_chain.invoke({\"question\": question})" + "Answer: Let's think step by step.\"\"\"\n", + "\n", + "prompt = PromptTemplate.from_template(template)\n", + "\n", + "model = AI21LLM(model=\"j2-ultra\")\n", + "\n", + "chain = prompt | model\n", + "\n", + "chain.invoke({\"question\": \"What is LangChain?\"})" ] }, { "cell_type": "code", "execution_count": null, - "id": "22bce013", + "id": "a52f765c", "metadata": {}, "outputs": [], "source": [] @@ -139,7 +116,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3.11.1 64-bit", "language": "python", "name": "python3" }, @@ -153,7 +130,12 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.13" + "version": "3.11.4" + }, + "vscode": { + "interpreter": { + "hash": "e971737741ff4ec9aff7dc6155a1060a59a8a6d52c757dbbe66bf8ee389494b1" + } } }, "nbformat": 4, diff --git a/docs/docs/integrations/llms/google_ai.ipynb b/docs/docs/integrations/llms/google_ai.ipynb index d5bd9809804d7..540cfca9985b0 100644 --- a/docs/docs/integrations/llms/google_ai.ipynb +++ b/docs/docs/integrations/llms/google_ai.ipynb @@ -264,13 +264,41 @@ " sys.stdout.flush()" ] }, + { + "cell_type": "markdown", + "id": "aefe6df7", + "metadata": {}, + "source": [ + "### Safety Settings\n", + "\n", + "Gemini models have default safety settings that can be overridden. If you are receiving lots of \"Safety Warnings\" from your models, you can try tweaking the `safety_settings` attribute of the model. For example, to turn off safety blocking for dangerous content, you can construct your LLM as follows:" + ] + }, { "cell_type": "code", "execution_count": null, - "id": "aefe6df7", + "id": "7e2682e6", "metadata": {}, "outputs": [], - "source": [] + "source": [ + "from langchain_google_genai import GoogleGenerativeAI, HarmBlockThreshold, HarmCategory\n", + "\n", + "llm = GoogleGenerativeAI(\n", + " model=\"gemini-pro\",\n", + " google_api_key=api_key,\n", + " safety_settings={\n", + " HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE,\n", + " },\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "e8d0ee0b", + "metadata": {}, + "source": [ + "For an enumeration of the categories and thresholds available, see Google's [safety setting types](https://ai.google.dev/api/python/google/generativeai/types/SafetySettingDict)." 
+ ] } ], "metadata": { diff --git a/docs/docs/integrations/llms/huggingface_endpoint.ipynb b/docs/docs/integrations/llms/huggingface_endpoint.ipynb new file mode 100644 index 0000000000000..a71a987bac101 --- /dev/null +++ b/docs/docs/integrations/llms/huggingface_endpoint.ipynb @@ -0,0 +1,238 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Huggingface Endpoints\n", + "\n", + ">The [Hugging Face Hub](https://huggingface.co/docs/hub/index) is a platform with over 120k models, 20k datasets, and 50k demo apps (Spaces), all open source and publicly available, in an online platform where people can easily collaborate and build ML together.\n", + "\n", + "The `Hugging Face Hub` also offers various endpoints to build ML applications.\n", + "This example showcases how to connect to the different Endpoint types.\n", + "\n", + "In particular, text generation inference is powered by [Text Generation Inference](https://github.com/huggingface/text-generation-inference): a custom-built Rust, Python and gRPC server for blazing-fast text generation inference." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_community.llms import HuggingFaceEndpoint" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Installation and Setup" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To use, you should have the ``huggingface_hub`` python [package installed](https://huggingface.co/docs/huggingface_hub/installation)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%pip install --upgrade --quiet huggingface_hub" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# get a token: https://huggingface.co/docs/api-inference/quicktour#get-your-api-token\n", + "\n", + "from getpass import getpass\n", + "\n", + "HUGGINGFACEHUB_API_TOKEN = getpass()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "\n", + "os.environ[\"HUGGINGFACEHUB_API_TOKEN\"] = HUGGINGFACEHUB_API_TOKEN" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Prepare Examples" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_community.llms import HuggingFaceEndpoint" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain.chains import LLMChain\n", + "from langchain.prompts import PromptTemplate" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "question = \"Who won the FIFA World Cup in the year 1994? \"\n", + "\n", + "template = \"\"\"Question: {question}\n", + "\n", + "Answer: Let's think step by step.\"\"\"\n", + "\n", + "prompt = PromptTemplate.from_template(template)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Examples\n", + "\n", + "Here is an example of how you can access the `HuggingFaceEndpoint` integration of the free [Serverless Endpoints](https://huggingface.co/inference-endpoints/serverless) API."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "repo_id = \"mistralai/Mistral-7B-Instruct-v0.2\"\n", + "\n", + "llm = HuggingFaceEndpoint(\n", + " repo_id=repo_id, max_length=128, temperature=0.5, token=HUGGINGFACEHUB_API_TOKEN\n", + ")\n", + "llm_chain = LLMChain(prompt=prompt, llm=llm)\n", + "print(llm_chain.run(question))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Dedicated Endpoint\n", + "\n", + "\n", + "The free serverless API lets you implement solutions and iterate in no time, but it may be rate limited for heavy use cases, since the loads are shared with other requests.\n", + "\n", + "For enterprise workloads, the best is to use [Inference Endpoints - Dedicated](https://huggingface.co/inference-endpoints/dedicated).\n", + "This gives access to a fully managed infrastructure that offer more flexibility and speed. These resoucres come with continuous support and uptime guarantees, as well as options like AutoScaling\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Set the url to your Inference Endpoint below\n", + "your_endpoint_url = \"https://fayjubiy2xqn36z0.us-east-1.aws.endpoints.huggingface.cloud\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "llm = HuggingFaceEndpoint(\n", + " endpoint_url=f\"{your_endpoint_url}\",\n", + " max_new_tokens=512,\n", + " top_k=10,\n", + " top_p=0.95,\n", + " typical_p=0.95,\n", + " temperature=0.01,\n", + " repetition_penalty=1.03,\n", + ")\n", + "llm(\"What did foo say about bar?\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Streaming" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", + "from langchain_community.llms import HuggingFaceEndpoint\n", + "\n", + "llm = HuggingFaceEndpoint(\n", + " endpoint_url=f\"{your_endpoint_url}\",\n", + " max_new_tokens=512,\n", + " top_k=10,\n", + " top_p=0.95,\n", + " typical_p=0.95,\n", + " temperature=0.01,\n", + " repetition_penalty=1.03,\n", + " streaming=True,\n", + ")\n", + "llm(\"What did foo say about bar?\", callbacks=[StreamingStdOutCallbackHandler()])" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "agents", + "language": "python", + "name": "agents" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.9" + }, + "vscode": { + "interpreter": { + "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/docs/docs/integrations/llms/huggingface_hub.ipynb b/docs/docs/integrations/llms/huggingface_hub.ipynb deleted file mode 100644 index 67dbe3c41f385..0000000000000 --- a/docs/docs/integrations/llms/huggingface_hub.ipynb +++ /dev/null @@ -1,466 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "959300d4", - "metadata": {}, - "source": [ - "# Hugging Face Hub\n", - "\n", - ">The [Hugging Face Hub](https://huggingface.co/docs/hub/index) is a platform with over 120k models, 20k datasets, and 50k demo apps (Spaces), all open source and publicly available, 
in an online platform where people can easily collaborate and build ML together.\n", - "\n", - "This example showcases how to connect to the `Hugging Face Hub` and use different models." - ] - }, - { - "cell_type": "markdown", - "id": "1ddafc6d-7d7c-48fa-838f-0e7f50895ce3", - "metadata": {}, - "source": [ - "## Installation and Setup" - ] - }, - { - "cell_type": "markdown", - "id": "4c1b8450-5eaf-4d34-8341-2d785448a1ff", - "metadata": { - "tags": [] - }, - "source": [ - "To use, you should have the ``huggingface_hub`` python [package installed](https://huggingface.co/docs/huggingface_hub/installation)." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d772b637-de00-4663-bd77-9bc96d798db2", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "%pip install --upgrade --quiet huggingface_hub" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "id": "d597a792-354c-4ca5-b483-5965eec5d63d", - "metadata": {}, - "outputs": [ - { - "name": "stdin", - "output_type": "stream", - "text": [ - " ยทยทยทยทยทยทยทยท\n" - ] - } - ], - "source": [ - "# get a token: https://huggingface.co/docs/api-inference/quicktour#get-your-api-token\n", - "\n", - "from getpass import getpass\n", - "\n", - "HUGGINGFACEHUB_API_TOKEN = getpass()" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "id": "b8c5b88c-e4b8-4d0d-9a35-6e8f106452c2", - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "\n", - "os.environ[\"HUGGINGFACEHUB_API_TOKEN\"] = HUGGINGFACEHUB_API_TOKEN" - ] - }, - { - "cell_type": "markdown", - "id": "84dd44c1-c428-41f3-a911-520281386c94", - "metadata": {}, - "source": [ - "## Prepare Examples" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "3fe7d1d1-241d-426a-acff-e208f1088871", - "metadata": {}, - "outputs": [], - "source": [ - "from langchain_community.llms import HuggingFaceHub" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "id": "6620f39b-3d32-4840-8931-ff7d2c3e47e8", - "metadata": {}, - "outputs": [], - "source": [ - "from langchain.chains import LLMChain\n", - "from langchain.prompts import PromptTemplate" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "44adc1a0-9c0a-4f1e-af5a-fe04222e78d7", - "metadata": {}, - "outputs": [], - "source": [ - "question = \"Who won the FIFA World Cup in the year 1994? \"\n", - "\n", - "template = \"\"\"Question: {question}\n", - "\n", - "Answer: Let's think step by step.\"\"\"\n", - "\n", - "prompt = PromptTemplate.from_template(template)" - ] - }, - { - "cell_type": "markdown", - "id": "ddaa06cf-95ec-48ce-b0ab-d892a7909693", - "metadata": {}, - "source": [ - "## Examples\n", - "\n", - "Below are some examples of models you can access through the `Hugging Face Hub` integration." - ] - }, - { - "cell_type": "markdown", - "id": "4c16fded-70d1-42af-8bfa-6ddda9f0bc63", - "metadata": {}, - "source": [ - "### `Flan`, by `Google`" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "id": "39c7eeac-01c4-486b-9480-e828a9e73e78", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "repo_id = \"google/flan-t5-xxl\" # See https://huggingface.co/models?pipeline_tag=text-generation&sort=downloads for some other options" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "id": "3acf0069", - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The FIFA World Cup was held in the year 1994. 
West Germany won the FIFA World Cup in 1994\n" - ] - } - ], - "source": [ - "llm = HuggingFaceHub(\n", - " repo_id=repo_id, model_kwargs={\"temperature\": 0.5, \"max_length\": 64}\n", - ")\n", - "llm_chain = LLMChain(prompt=prompt, llm=llm)\n", - "\n", - "print(llm_chain.run(question))" - ] - }, - { - "cell_type": "markdown", - "id": "1a5c97af-89bc-4e59-95c1-223742a9160b", - "metadata": {}, - "source": [ - "### `Dolly`, by `Databricks`\n", - "\n", - "See [Databricks](https://huggingface.co/databricks) organization page for a list of available models." - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "521fcd2b-8e38-4920-b407-5c7d330411c9", - "metadata": {}, - "outputs": [], - "source": [ - "repo_id = \"databricks/dolly-v2-3b\"" - ] - }, - { - "cell_type": "code", - "execution_count": 10, - "id": "9907ec3a-fe0c-4543-81c4-d42f9453f16c", - "metadata": { - "tags": [] - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - " First of all, the world cup was won by the Germany. Then the Argentina won the world cup in 2022. So, the Argentina won the world cup in 1994.\n", - "\n", - "\n", - "Question: Who\n" - ] - } - ], - "source": [ - "llm = HuggingFaceHub(\n", - " repo_id=repo_id, model_kwargs={\"temperature\": 0.5, \"max_length\": 64}\n", - ")\n", - "llm_chain = LLMChain(prompt=prompt, llm=llm)\n", - "print(llm_chain.run(question))" - ] - }, - { - "cell_type": "markdown", - "id": "03f6ae52-b5f9-4de6-832c-551cb3fa11ae", - "metadata": {}, - "source": [ - "### `Camel`, by `Writer`\n", - "\n", - "See [Writer's](https://huggingface.co/Writer) organization page for a list of available models." - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "257a091d-750b-4910-ac08-fe1c7b3fd98b", - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "repo_id = \"Writer/camel-5b-hf\" # See https://huggingface.co/Writer for other options" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b06f6838-a11a-4d6a-88e3-91fa1747a2b3", - "metadata": {}, - "outputs": [], - "source": [ - "llm = HuggingFaceHub(\n", - " repo_id=repo_id, model_kwargs={\"temperature\": 0.5, \"max_length\": 64}\n", - ")\n", - "llm_chain = LLMChain(prompt=prompt, llm=llm)\n", - "print(llm_chain.run(question))" - ] - }, - { - "cell_type": "markdown", - "id": "2bf838eb-1083-402f-b099-b07c452418c8", - "metadata": {}, - "source": [ - "### `XGen`, by `Salesforce`\n", - "\n", - "See [more information](https://github.com/salesforce/xgen)." - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "18c78880-65d7-41d0-9722-18090efb60e9", - "metadata": {}, - "outputs": [], - "source": [ - "repo_id = \"Salesforce/xgen-7b-8k-base\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "1b1150b4-ec30-4674-849e-6a41b085aa2b", - "metadata": {}, - "outputs": [], - "source": [ - "llm = HuggingFaceHub(\n", - " repo_id=repo_id, model_kwargs={\"temperature\": 0.5, \"max_length\": 64}\n", - ")\n", - "llm_chain = LLMChain(prompt=prompt, llm=llm)\n", - "print(llm_chain.run(question))" - ] - }, - { - "cell_type": "markdown", - "id": "0aca9f9e-f333-449c-97b2-10d1dbf17e75", - "metadata": {}, - "source": [ - "### `Falcon`, by `Technology Innovation Institute (TII)`\n", - "\n", - "See [more information](https://huggingface.co/tiiuae/falcon-40b)." 
- ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "496b35ac-5ee2-4b68-a6ce-232608f56c03", - "metadata": {}, - "outputs": [], - "source": [ - "repo_id = \"tiiuae/falcon-40b\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "ff2541ad-e394-4179-93c2-7ae9c4ca2a25", - "metadata": {}, - "outputs": [], - "source": [ - "llm = HuggingFaceHub(\n", - " repo_id=repo_id, model_kwargs={\"temperature\": 0.5, \"max_length\": 64}\n", - ")\n", - "llm_chain = LLMChain(prompt=prompt, llm=llm)\n", - "print(llm_chain.run(question))" - ] - }, - { - "cell_type": "markdown", - "id": "7e15849b-5561-4bb9-86ec-6412ca10196a", - "metadata": {}, - "source": [ - "### `InternLM-Chat`, by `Shanghai AI Laboratory`\n", - "\n", - "See [more information](https://huggingface.co/internlm/internlm-7b)." - ] - }, - { - "cell_type": "code", - "execution_count": 19, - "id": "3b533461-59f8-406e-907b-000841fa60a7", - "metadata": {}, - "outputs": [], - "source": [ - "repo_id = \"internlm/internlm-chat-7b\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "c71210b9-5895-41a2-889a-f430d22fa1aa", - "metadata": {}, - "outputs": [], - "source": [ - "llm = HuggingFaceHub(\n", - " repo_id=repo_id, model_kwargs={\"max_length\": 128, \"temperature\": 0.8}\n", - ")\n", - "llm_chain = LLMChain(prompt=prompt, llm=llm)\n", - "print(llm_chain.run(question))" - ] - }, - { - "cell_type": "markdown", - "id": "4f2e5132-1713-42d7-919a-8c313744ce95", - "metadata": {}, - "source": [ - "### `Qwen`, by `Alibaba Cloud`\n", - "\n", - ">`Tongyi Qianwen-7B` (`Qwen-7B`) is a model with a scale of 7 billion parameters in the `Tongyi Qianwen` large model series developed by `Alibaba Cloud`. `Qwen-7B` is a large language model based on Transformer, which is trained on ultra-large-scale pre-training data.\n", - "\n", - "See [more information on HuggingFace](https://huggingface.co/Qwen/Qwen-7B) of on [GitHub](https://github.com/QwenLM/Qwen-7B).\n", - "\n", - "See here a [big example for LangChain integration and Qwen](https://github.com/QwenLM/Qwen-7B/blob/main/examples/langchain_tooluse.ipynb)." - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "id": "f598b1ca-77c7-40f1-a83f-c21ea9910c88", - "metadata": {}, - "outputs": [], - "source": [ - "repo_id = \"Qwen/Qwen-7B\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "2c97f4e2-d401-44fb-9da7-b60b2e2cc663", - "metadata": {}, - "outputs": [], - "source": [ - "llm = HuggingFaceHub(\n", - " repo_id=repo_id, model_kwargs={\"max_length\": 128, \"temperature\": 0.5}\n", - ")\n", - "llm_chain = LLMChain(prompt=prompt, llm=llm)\n", - "print(llm_chain.run(question))" - ] - }, - { - "cell_type": "markdown", - "id": "e3871376-ed0e-49a8-8d9b-7e60dbbd2b35", - "metadata": {}, - "source": [ - "### `Yi` series models, by `01.ai`\n", - "\n", - ">The `Yi` series models are large language models trained from scratch by developers at [01.ai](https://01.ai/). The first public release contains two bilingual(English/Chinese) base models with the parameter sizes of 6B(`Yi-6B`) and 34B(`Yi-34B`). Both of them are trained with 4K sequence length and can be extended to 32K during inference time. The `Yi-6B-200K` and `Yi-34B-200K` are base model with 200K context length.\n", - "\n", - "Here we test the [Yi-34B](https://huggingface.co/01-ai/Yi-34B) model." 
- ] - }, - { - "cell_type": "code", - "execution_count": 7, - "id": "1c9d3125-3f50-48b8-93b6-b50847207afa", - "metadata": {}, - "outputs": [], - "source": [ - "repo_id = \"01-ai/Yi-34B\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "8b661069-8229-4850-9f13-c4ca28c0c96b", - "metadata": {}, - "outputs": [], - "source": [ - "llm = HuggingFaceHub(\n", - " repo_id=repo_id, model_kwargs={\"max_length\": 128, \"temperature\": 0.5}\n", - ")\n", - "llm_chain = LLMChain(prompt=prompt, llm=llm)\n", - "print(llm_chain.run(question))" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "dd6f3edc-9f97-47a6-ab2c-116756babbe6", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.10.12" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/docs/docs/integrations/llms/huggingface_textgen_inference.ipynb b/docs/docs/integrations/llms/huggingface_textgen_inference.ipynb deleted file mode 100644 index e9b5e31c38600..0000000000000 --- a/docs/docs/integrations/llms/huggingface_textgen_inference.ipynb +++ /dev/null @@ -1,108 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Huggingface TextGen Inference\n", - "\n", - "[Text Generation Inference](https://github.com/huggingface/text-generation-inference) is a Rust, Python and gRPC server for text generation inference. Used in production at [HuggingFace](https://huggingface.co/) to power LLMs api-inference widgets.\n", - "\n", - "This notebooks goes over how to use a self hosted LLM using `Text Generation Inference`." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "To use, you should have the `text_generation` python package installed." 
- ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "tags": [] - }, - "outputs": [], - "source": [ - "# !pip3 install text_generation" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from langchain_community.llms import HuggingFaceTextGenInference\n", - "\n", - "llm = HuggingFaceTextGenInference(\n", - " inference_server_url=\"http://localhost:8010/\",\n", - " max_new_tokens=512,\n", - " top_k=10,\n", - " top_p=0.95,\n", - " typical_p=0.95,\n", - " temperature=0.01,\n", - " repetition_penalty=1.03,\n", - ")\n", - "llm(\"What did foo say about bar?\")" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Streaming" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", - "from langchain_community.llms import HuggingFaceTextGenInference\n", - "\n", - "llm = HuggingFaceTextGenInference(\n", - " inference_server_url=\"http://localhost:8010/\",\n", - " max_new_tokens=512,\n", - " top_k=10,\n", - " top_p=0.95,\n", - " typical_p=0.95,\n", - " temperature=0.01,\n", - " repetition_penalty=1.03,\n", - " streaming=True,\n", - ")\n", - "llm(\"What did foo say about bar?\", callbacks=[StreamingStdOutCallbackHandler()])" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.11.3" - }, - "vscode": { - "interpreter": { - "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" - } - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/docs/docs/integrations/llms/ibm_watsonx.ipynb b/docs/docs/integrations/llms/ibm_watsonx.ipynb index 32ecdc34fc3e0..eafeb095d83ec 100644 --- a/docs/docs/integrations/llms/ibm_watsonx.ipynb +++ b/docs/docs/integrations/llms/ibm_watsonx.ipynb @@ -109,7 +109,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain_community.llms import WatsonxLLM\n", + "from langchain_ibm import WatsonxLLM\n", "\n", "watsonx_llm = WatsonxLLM(\n", " model_id=\"google/flan-ul2\",\n", diff --git a/docs/docs/integrations/llms/llamafile.ipynb b/docs/docs/integrations/llms/llamafile.ipynb new file mode 100644 index 0000000000000..2b778f4d274b1 --- /dev/null +++ b/docs/docs/integrations/llms/llamafile.ipynb @@ -0,0 +1,133 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Llamafile\n", + "\n", + "[Llamafile](https://github.com/Mozilla-Ocho/llamafile) lets you distribute and run LLMs with a single file.\n", + "\n", + "Llamafile does this by combining [llama.cpp](https://github.com/ggerganov/llama.cpp) with [Cosmopolitan Libc](https://github.com/jart/cosmopolitan) into one framework that collapses all the complexity of LLMs down to a single-file executable (called a \"llamafile\") that runs locally on most computers, with no installation.\n", + "\n", + "## Setup\n", + "\n", + "1. Download a llamafile for the model you'd like to use. You can find many models in llamafile format on [HuggingFace](https://huggingface.co/models?other=llamafile). In this guide, we will download a small one, `TinyLlama-1.1B-Chat-v1.0.Q5_K_M`. 
Note: if you don't have `wget`, you can just download the model via this [link](https://huggingface.co/jartine/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/TinyLlama-1.1B-Chat-v1.0.Q5_K_M.llamafile?download=true).\n", + "\n", + "```bash\n", + "wget https://huggingface.co/jartine/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/TinyLlama-1.1B-Chat-v1.0.Q5_K_M.llamafile\n", + "```\n", + "\n", + "2. Make the llamafile executable. First, if you haven't done so already, open a terminal. **If you're using MacOS, Linux, or BSD,** you'll need to grant permission for your computer to execute this new file using `chmod` (see below). **If you're on Windows,** rename the file by adding \".exe\" to the end (model file should be named `TinyLlama-1.1B-Chat-v1.0.Q5_K_M.llamafile.exe`).\n", + "\n", + "\n", + "```bash\n", + "chmod +x TinyLlama-1.1B-Chat-v1.0.Q5_K_M.llamafile # run if you're on MacOS, Linux, or BSD\n", + "```\n", + "\n", + "3. Run the llamafile in \"server mode\":\n", + "\n", + "```bash\n", + "./TinyLlama-1.1B-Chat-v1.0.Q5_K_M.llamafile --server --nobrowser\n", + "```\n", + "\n", + "Now you can make calls to the llamafile's REST API. By default, the llamafile server listens at http://localhost:8080. You can find full server documentation [here](https://github.com/Mozilla-Ocho/llamafile/blob/main/llama.cpp/server/README.md#api-endpoints). You can interact with the llamafile directly via the REST API, but here we'll show how to interact with it using LangChain.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Usage" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'? \\nI\\'ve got a thing for pink, but you know that.\\n\"Can we not talk about work anymore?\" - What did she say?\\nI don\\'t want to be a burden on you.\\nIt\\'s hard to keep a good thing going.\\nYou can\\'t tell me what I want, I have a life too!'" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from langchain_community.llms.llamafile import Llamafile\n", + "\n", + "llm = Llamafile()\n", + "\n", + "llm.invoke(\"Tell me a joke\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To stream tokens, use the `.stream(...)` method:" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + ".\n", + "- She said, \"Iโ€™m tired of my life. What should I do?\"\n", + "- The man replied, \"I hear you. But donโ€™t worry. Life is just like a joke. It has its funny parts too.\"\n", + "- The woman looked at him, amazed and happy to hear his wise words. - \"Thank you for your wisdom,\" she said, smiling. - He replied, \"Any time. But it doesn't come easy. You have to laugh and keep moving forward in life.\"\n", + "- She nodded, thanking him again. - The man smiled wryly. \"Life can be tough. Sometimes it seems like youโ€™re never going to get out of your situation.\"\n", + "- He said, \"I know that. But the key is not giving up. Life has many ups and downs, but in the end, it will turn out okay.\"\n", + "- The woman's eyes softened. \"Thank you for your advice. It's so important to keep moving forward in life,\" she said. - He nodded once again. \"Youโ€™re welcome. 
I hope your journey is filled with laughter and joy.\"\n", + "- They both smiled and left the bar, ready to embark on their respective adventures.\n" + ] + } + ], + "source": [ + "query = \"Tell me a joke\"\n", + "\n", + "for chunks in llm.stream(query):\n", + " print(chunks, end=\"\")\n", + "\n", + "print()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To learn more about the LangChain Expressive Language and the available methods on an LLM, see the [LCEL Interface](https://python.langchain.com/docs/expression_language/interface)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.7" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/docs/docs/integrations/llms/llm_caching.ipynb b/docs/docs/integrations/llms/llm_caching.ipynb index 791ff870b0fda..f428939a2a8c8 100644 --- a/docs/docs/integrations/llms/llm_caching.ipynb +++ b/docs/docs/integrations/llms/llm_caching.ipynb @@ -1131,6 +1131,16 @@ "print(llm(\"How come we always see one face of the moon?\"))" ] }, + { + "cell_type": "markdown", + "id": "55dc84b3-37cb-4f19-b175-40e18e06f83f", + "metadata": {}, + "source": [ + "#### Attribution statement\n", + "\n", + ">Apache Cassandra, Cassandra and Apache are either registered trademarks or trademarks of the [Apache Software Foundation](http://www.apache.org/) in the United States and/or other countries." + ] + }, { "cell_type": "markdown", "id": "8712f8fc-bb89-4164-beb9-c672778bbd91", @@ -1588,7 +1598,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.1" + "version": "3.9.17" } }, "nbformat": 4, diff --git a/docs/docs/integrations/llms/oci_generative_ai.ipynb b/docs/docs/integrations/llms/oci_generative_ai.ipynb index 200f1038f3e56..40332f13e1ce7 100644 --- a/docs/docs/integrations/llms/oci_generative_ai.ipynb +++ b/docs/docs/integrations/llms/oci_generative_ai.ipynb @@ -98,6 +98,9 @@ " model_id=\"MY_MODEL\",\n", " service_endpoint=\"https://inference.generativeai.us-chicago-1.oci.oraclecloud.com\",\n", " compartment_id=\"MY_OCID\",\n", + " auth_type=\"SECURITY_TOKEN\",\n", + " auth_profile=\"MY_PROFILE\", # replace with your profile name\n", + " model_kwargs={\"temperature\": 0.7, \"top_p\": 0.75, \"max_tokens\": 200},\n", ")\n", "\n", "prompt = PromptTemplate(input_variables=[\"query\"], template=\"{query}\")\n", @@ -158,13 +161,6 @@ "print(chain.invoke(\"when was oracle founded?\"))\n", "print(chain.invoke(\"where is oracle headquartered?\"))" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/docs/docs/integrations/llms/yuan2.ipynb b/docs/docs/integrations/llms/yuan2.ipynb new file mode 100644 index 0000000000000..06e45df81df8f --- /dev/null +++ b/docs/docs/integrations/llms/yuan2.ipynb @@ -0,0 +1,117 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": false, + "pycharm": { + "name": "#%% md\n" + } + }, + "source": [ + "# Yuan2.0\n", + "\n", + "[Yuan2.0](https://github.com/IEIT-Yuan/Yuan-2.0) is a new generation Fundamental Large Language Model developed by IEIT System. 
We have published all three models, Yuan 2.0-102B, Yuan 2.0-51B, and Yuan 2.0-2B. And we provide relevant scripts for pretraining, fine-tuning, and inference services for other developers. Yuan2.0 is based on Yuan1.0, utilizing a wider range of high-quality pre training data and instruction fine-tuning datasets to enhance the model's understanding of semantics, mathematics, reasoning, code, knowledge, and other aspects.\n", + "\n", + "This example goes over how to use LangChain to interact with `Yuan2.0`(2B/51B/102B) Inference for text generation.\n", + "\n", + "Yuan2.0 set up an inference service so user just need request the inference api to get result, which is introduced in [Yuan2.0 Inference-Server](https://github.com/IEIT-Yuan/Yuan-2.0/blob/main/docs/inference_server.md)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "pycharm": { + "is_executing": true, + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "from langchain.chains import LLMChain\n", + "from langchain_community.llms.yuan2 import Yuan2" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "pycharm": { + "is_executing": true, + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "# default infer_api for a local deployed Yuan2.0 inference server\n", + "infer_api = \"http://127.0.0.1:8000\"\n", + "\n", + "# direct access endpoint in a proxied environment\n", + "# import os\n", + "# os.environ[\"no_proxy\"]=\"localhost,127.0.0.1,::1\"\n", + "\n", + "yuan_llm = Yuan2(\n", + " infer_api=infer_api,\n", + " max_tokens=2048,\n", + " temp=1.0,\n", + " top_p=0.9,\n", + " top_k=40,\n", + " use_history=False,\n", + ")\n", + "\n", + "# turn on use_history only when you want the Yuan2.0 to keep track of the conversation history\n", + "# and send the accumulated context to the backend model api, which make it stateful. 
By default it is stateless.\n", + "# llm.use_history = True" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "pycharm": { + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "question = \"่ฏทไป‹็ปไธ€ไธ‹ไธญๅ›ฝใ€‚\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "pycharm": { + "is_executing": true, + "name": "#%%\n" + } + }, + "outputs": [], + "source": [ + "print(yuan_llm(question))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "langchain-dev", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.12" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} \ No newline at end of file diff --git a/docs/docs/integrations/memory/astradb_chat_message_history.ipynb b/docs/docs/integrations/memory/astradb_chat_message_history.ipynb index abe6b1b2876f6..068b804f00b45 100644 --- a/docs/docs/integrations/memory/astradb_chat_message_history.ipynb +++ b/docs/docs/integrations/memory/astradb_chat_message_history.ipynb @@ -32,7 +32,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install --upgrade --quiet \"astrapy>=0.6.2\"" + "%pip install --upgrade --quiet \"astrapy>=0.7.1\"" ] }, { diff --git a/docs/docs/integrations/memory/aws_dynamodb.ipynb b/docs/docs/integrations/memory/aws_dynamodb.ipynb index 32001587bfbbc..9410bbe024c54 100644 --- a/docs/docs/integrations/memory/aws_dynamodb.ipynb +++ b/docs/docs/integrations/memory/aws_dynamodb.ipynb @@ -274,8 +274,6 @@ "metadata": {}, "outputs": [], "source": [ - "from typing import Optional\n", - "\n", "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n", "from langchain_core.runnables.history import RunnableWithMessageHistory\n", "from langchain_openai import ChatOpenAI" diff --git a/docs/docs/integrations/memory/cassandra_chat_message_history.ipynb b/docs/docs/integrations/memory/cassandra_chat_message_history.ipynb index d802bc785da35..64ead129f51c0 100644 --- a/docs/docs/integrations/memory/cassandra_chat_message_history.ipynb +++ b/docs/docs/integrations/memory/cassandra_chat_message_history.ipynb @@ -145,6 +145,24 @@ "source": [ "message_history.messages" ] + }, + { + "cell_type": "markdown", + "id": "59902d0f-e9ba-4e3d-a7e0-ce202b9d3c43", + "metadata": {}, + "source": [ + "#### Attribution statement\n", + "\n", + "> Apache Cassandra, Cassandra and Apache are either registered trademarks or trademarks of the [Apache Software Foundation](http://www.apache.org/) in the United States and/or other countries." 
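A minimal usage sketch for `CassandraChatMessageHistory` is shown below. It assumes a Cassandra (or Astra DB through CQL) session has already been initialized via `cassio`, as described on the Apache Cassandra provider page; the session id and messages are placeholders.

```python
from langchain_community.chat_message_histories import CassandraChatMessageHistory

# Assumes cassio.init(...) or an equivalent Cassandra session/keyspace has been set up.
message_history = CassandraChatMessageHistory(session_id="my-session")

# Append a couple of turns, then read them back.
message_history.add_user_message("Hi there!")
message_history.add_ai_message("Hello! How can I help you today?")

print(message_history.messages)  # a list of HumanMessage / AIMessage objects
```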
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7efaa51c-e9ee-4dce-80a4-eb9280a0dbe5", + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { @@ -163,7 +181,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.9.17" } }, "nbformat": 4, diff --git a/docs/docs/integrations/memory/mongodb_chat_message_history.ipynb b/docs/docs/integrations/memory/mongodb_chat_message_history.ipynb index 8356154fcda4c..88771c7680fce 100644 --- a/docs/docs/integrations/memory/mongodb_chat_message_history.ipynb +++ b/docs/docs/integrations/memory/mongodb_chat_message_history.ipynb @@ -133,8 +133,6 @@ "metadata": {}, "outputs": [], "source": [ - "from typing import Optional\n", - "\n", "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n", "from langchain_core.runnables.history import RunnableWithMessageHistory\n", "from langchain_openai import ChatOpenAI" diff --git a/docs/docs/integrations/memory/remembrall.md b/docs/docs/integrations/memory/remembrall.md index 9cc03fd75e788..8f8b818785947 100644 --- a/docs/docs/integrations/memory/remembrall.md +++ b/docs/docs/integrations/memory/remembrall.md @@ -16,6 +16,12 @@ To get started, [sign in with Github on the Remembrall platform](https://remembr Any request that you send with the modified `openai_api_base` (see below) and Remembrall API key will automatically be tracked in the Remembrall dashboard. You **never** have to share your OpenAI key with our platform and this information is **never** stored by the Remembrall systems. +To do this, we need to install the following dependencies: + +```bash +pip install -U langchain-openai +``` + ### Enable Long Term Memory In addition to setting the `openai_api_base` and Remembrall API key via `x-gp-api-key`, you should specify a UID to maintain memory for. This will usually be a unique user identifier (like email). diff --git a/docs/docs/integrations/memory/sql_chat_message_history.ipynb b/docs/docs/integrations/memory/sql_chat_message_history.ipynb index 859399156724e..e8cca612624a6 100644 --- a/docs/docs/integrations/memory/sql_chat_message_history.ipynb +++ b/docs/docs/integrations/memory/sql_chat_message_history.ipynb @@ -26,7 +26,7 @@ "The integration lives in the `langchain-community` package, so we need to install that. 
We also need to install the `SQLAlchemy` package.\n", "\n", "```bash\n", - "pip install -U langchain-community SQLAlchemy\n", + "pip install -U langchain-community SQLAlchemy langchain-openai\n", "```" ] }, @@ -71,10 +71,7 @@ "end_time": "2023-08-28T10:04:38.077748Z", "start_time": "2023-08-28T10:04:36.105894Z" }, - "collapsed": false, - "jupyter": { - "outputs_hidden": false - } + "collapsed": false }, "outputs": [], "source": [ @@ -97,10 +94,7 @@ "end_time": "2023-08-28T10:04:38.929396Z", "start_time": "2023-08-28T10:04:38.915727Z" }, - "collapsed": false, - "jupyter": { - "outputs_hidden": false - } + "collapsed": false }, "outputs": [ { @@ -137,8 +131,6 @@ "metadata": {}, "outputs": [], "source": [ - "from typing import Optional\n", - "\n", "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n", "from langchain_core.runnables.history import RunnableWithMessageHistory\n", "from langchain_openai import ChatOpenAI" diff --git a/docs/docs/integrations/memory/sqlite.ipynb b/docs/docs/integrations/memory/sqlite.ipynb index 200335cc4df4e..21dde4e114bf4 100644 --- a/docs/docs/integrations/memory/sqlite.ipynb +++ b/docs/docs/integrations/memory/sqlite.ipynb @@ -119,8 +119,6 @@ "metadata": {}, "outputs": [], "source": [ - "from typing import Optional\n", - "\n", "from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder\n", "from langchain_core.runnables.history import RunnableWithMessageHistory\n", "from langchain_openai import ChatOpenAI" diff --git a/docs/docs/integrations/platforms/index.mdx b/docs/docs/integrations/platforms/index.mdx index ee66df3324ae7..14c09f2fa8c45 100644 --- a/docs/docs/integrations/platforms/index.mdx +++ b/docs/docs/integrations/platforms/index.mdx @@ -5,10 +5,12 @@ sidebar_class_name: hidden # Providers -LangChain integrates with many providers +LangChain integrates with many providers. ## Partner Packages +These providers have standalone `langchain-{provider}` packages for improved versioning, dependency management and testing. + - [OpenAI](/docs/integrations/platforms/openai) - [Anthropic](/docs/integrations/platforms/anthropic) - [Google](/docs/integrations/platforms/google) @@ -25,3 +27,7 @@ LangChain integrates with many providers - [AWS](/docs/integrations/platforms/aws) - [Hugging Face](/docs/integrations/platforms/huggingface) - [Microsoft](/docs/integrations/platforms/microsoft) + +## All Providers + +Click [here](/docs/integrations/providers/) to see all providers. diff --git a/docs/docs/integrations/providers/apache_doris.mdx b/docs/docs/integrations/providers/apache_doris.mdx new file mode 100644 index 0000000000000..93db933030986 --- /dev/null +++ b/docs/docs/integrations/providers/apache_doris.mdx @@ -0,0 +1,21 @@ +# Apache Doris + +>[Apache Doris](https://doris.apache.org/) is a modern data warehouse for real-time analytics. +It delivers lightning-fast analytics on real-time data at scale. + +>Usually `Apache Doris` is categorized into OLAP, and it has showed excellent performance in [ClickBench โ€” a Benchmark For Analytical DBMS](https://benchmark.clickhouse.com/). Since it has a super-fast vectorized execution engine, it could also be used as a fast vectordb. + +## Installation and Setup + + +```bash +pip install pymysql +``` + +## Vector Store + +See a [usage example](/docs/integrations/vectorstores/apache_doris). 
+ +```python +from langchain_community.vectorstores import ApacheDoris +``` diff --git a/docs/docs/integrations/providers/astradb.mdx b/docs/docs/integrations/providers/astradb.mdx index 20a94d736b361..8cc4788cc28fe 100644 --- a/docs/docs/integrations/providers/astradb.mdx +++ b/docs/docs/integrations/providers/astradb.mdx @@ -1,25 +1,21 @@ # Astra DB -This page lists the integrations available with [Astra DB](https://docs.datastax.com/en/astra/home/astra.html) and [Apache Cassandraยฎ](https://cassandra.apache.org/). +> DataStax [Astra DB](https://docs.datastax.com/en/astra/home/astra.html) is a serverless vector-capable database built on Apache Cassandraยฎ and made conveniently available +> through an easy-to-use JSON API. ### Setup Install the following Python package: ```bash -pip install "astrapy>=0.5.3" +pip install "astrapy>=0.7.1" ``` -## Astra DB - -> DataStax [Astra DB](https://docs.datastax.com/en/astra/home/astra.html) is a serverless vector-capable database built on Cassandra and made conveniently available -> through an easy-to-use JSON API. - -### Vector Store +## Vector Store ```python -from langchain_community.vectorstores import AstraDB -vector_store = AstraDB( +from langchain_astradb import AstraDBVectorStore +vector_store = AstraDBVectorStore( embedding=my_embedding, collection_name="my_store", api_endpoint="...", @@ -29,11 +25,22 @@ vector_store = AstraDB( Learn more in the [example notebook](/docs/integrations/vectorstores/astradb). -### LLM Cache +## Chat message history + +```python +from langchain_community.chat_message_histories import AstraDBChatMessageHistory +message_history = AstraDBChatMessageHistory( + session_id="test-session", + api_endpoint="...", + token="...", +) +``` + +## LLM Cache ```python from langchain.globals import set_llm_cache -from langchain.cache import AstraDBCache +from langchain_community.cache import AstraDBCache set_llm_cache(AstraDBCache( api_endpoint="...", token="...", @@ -43,11 +50,11 @@ set_llm_cache(AstraDBCache( Learn more in the [example notebook](/docs/integrations/llms/llm_caching#astra-db-caches) (scroll to the Astra DB section). -### Semantic LLM Cache +## Semantic LLM Cache ```python from langchain.globals import set_llm_cache -from langchain.cache import AstraDBSemanticCache +from langchain_community.cache import AstraDBSemanticCache set_llm_cache(AstraDBSemanticCache( embedding=my_embedding, api_endpoint="...", @@ -57,20 +64,9 @@ set_llm_cache(AstraDBSemanticCache( Learn more in the [example notebook](/docs/integrations/llms/llm_caching#astra-db-caches) (scroll to the appropriate section). -### Chat message history - -```python -from langchain.memory import AstraDBChatMessageHistory -message_history = AstraDBChatMessageHistory( - session_id="test-session", - api_endpoint="...", - token="...", -) -``` - Learn more in the [example notebook](/docs/integrations/memory/astradb_chat_message_history). -### Document loader +## Document loader ```python from langchain_community.document_loaders import AstraDBLoader @@ -83,13 +79,13 @@ loader = AstraDBLoader( Learn more in the [example notebook](/docs/integrations/document_loaders/astradb). 
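The loader and vector store sections above compose naturally into a small end-to-end flow: read documents out of one collection, index them into a vector-enabled collection, and query it. The sketch below is illustrative only; the collection names, endpoint, and token are placeholders, the exact `AstraDBLoader` arguments should be checked against the loader notebook linked above, and any `Embeddings` implementation can stand in for `OpenAIEmbeddings`.

```python
from langchain_astradb import AstraDBVectorStore
from langchain_community.document_loaders import AstraDBLoader
from langchain_openai import OpenAIEmbeddings

# Read documents out of an existing Astra DB collection (placeholder credentials).
loader = AstraDBLoader(
    collection_name="my_source_collection",
    api_endpoint="...",
    token="...",
)
docs = loader.load()

# Index them into a vector-enabled collection and run a similarity search.
vector_store = AstraDBVectorStore(
    embedding=OpenAIEmbeddings(),
    collection_name="my_store",
    api_endpoint="...",
    token="...",
)
vector_store.add_documents(docs)

for doc in vector_store.similarity_search("my query", k=3):
    print(doc.page_content)
```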
-### Self-querying retriever +## Self-querying retriever ```python -from langchain_community.vectorstores import AstraDB +from langchain_astradb import AstraDBVectorStore from langchain.retrievers.self_query.base import SelfQueryRetriever -vector_store = AstraDB( +vector_store = AstraDBVectorStore( embedding=my_embedding, collection_name="my_store", api_endpoint="...", @@ -106,7 +102,7 @@ retriever = SelfQueryRetriever.from_llm( Learn more in the [example notebook](/docs/integrations/retrievers/self_query/astradb). -### Store +## Store ```python from langchain_community.storage import AstraDBStore @@ -119,7 +115,7 @@ store = AstraDBStore( Learn more in the [example notebook](/docs/integrations/stores/astradb#astradbstore). -### Byte Store +## Byte Store ```python from langchain_community.storage import AstraDBByteStore @@ -131,57 +127,3 @@ store = AstraDBByteStore( ``` Learn more in the [example notebook](/docs/integrations/stores/astradb#astradbbytestore). - -## Apache Cassandra and Astra DB through CQL - -> [Cassandra](https://cassandra.apache.org/) is a NoSQL, row-oriented, highly scalable and highly available database. -> Starting with version 5.0, the database ships with [vector search capabilities](https://cassandra.apache.org/doc/trunk/cassandra/vector-search/overview.html). -> DataStax [Astra DB through CQL](https://docs.datastax.com/en/astra-serverless/docs/vector-search/quickstart.html) is a managed serverless database built on Cassandra, offering the same interface and strengths. - -These databases use the CQL protocol (Cassandra Query Language). -Hence, a different set of connectors, outlined below, shall be used. - -### Vector Store - -```python -from langchain_community.vectorstores import Cassandra -vector_store = Cassandra( - embedding=my_embedding, - table_name="my_store", -) -``` - -Learn more in the [example notebook](/docs/integrations/vectorstores/astradb#apache-cassandra-and-astra-db-through-cql) (scroll down to the CQL-specific section). - - -### Memory - -```python -from langchain.memory import CassandraChatMessageHistory -message_history = CassandraChatMessageHistory(session_id="my-session") -``` - -Learn more in the [example notebook](/docs/integrations/memory/cassandra_chat_message_history). - - -### LLM Cache - -```python -from langchain.cache import CassandraCache -langchain.llm_cache = CassandraCache() -``` - -Learn more in the [example notebook](/docs/integrations/llms/llm_caching#cassandra-caches) (scroll to the Cassandra section). - - -### Semantic LLM Cache - -```python -from langchain.cache import CassandraSemanticCache -cassSemanticCache = CassandraSemanticCache( - embedding=my_embedding, - table_name="my_store", -) -``` - -Learn more in the [example notebook](/docs/integrations/llms/llm_caching#cassandra-caches) (scroll to the appropriate section). diff --git a/docs/docs/integrations/providers/cassandra.mdx b/docs/docs/integrations/providers/cassandra.mdx new file mode 100644 index 0000000000000..392f7a1767e3e --- /dev/null +++ b/docs/docs/integrations/providers/cassandra.mdx @@ -0,0 +1,76 @@ +# Apache Cassandra + +> [Apache Cassandraยฎ](https://cassandra.apache.org/) is a NoSQL, row-oriented, highly scalable and highly available database. +> Starting with version 5.0, the database ships with [vector search capabilities](https://cassandra.apache.org/doc/trunk/cassandra/vector-search/overview.html). + +The integrations outlined in this page can be used with Cassandra as well as other CQL-compatible databases, i.e. 
those using the Cassandra Query Language protocol. + + +### Setup + +Install the following Python package: + +```bash +pip install "cassio>=0.1.4" +``` + + +## Vector Store + +```python +from langchain_community.vectorstores import Cassandra +vector_store = Cassandra( + embedding=my_embedding, + table_name="my_store", +) +``` + +Learn more in the [example notebook](/docs/integrations/vectorstores/cassandra). + +## Chat message history + +```python +from langchain_community.chat_message_histories import CassandraChatMessageHistory +message_history = CassandraChatMessageHistory(session_id="my-session") +``` + +Learn more in the [example notebook](/docs/integrations/memory/cassandra_chat_message_history). + + +## LLM Cache + +```python +from langchain.globals import set_llm_cache +from langchain_community.cache import CassandraCache +set_llm_cache(CassandraCache()) +``` + +Learn more in the [example notebook](/docs/integrations/llms/llm_caching#cassandra-caches) (scroll to the Cassandra section). + + +## Semantic LLM Cache + +```python +from langchain.globals import set_llm_cache +from langchain_community.cache import CassandraSemanticCache +set_llm_cache(CassandraSemanticCache( + embedding=my_embedding, + table_name="my_store", +)) +``` + +Learn more in the [example notebook](/docs/integrations/llms/llm_caching#cassandra-caches) (scroll to the appropriate section). + +## Document loader + +```python +from langchain_community.document_loaders import CassandraLoader +loader = CassandraLoader(table="my_table") +docs = loader.load() +``` + +Learn more in the [example notebook](/docs/integrations/document_loaders/cassandra). + +#### Attribution statement + +> Apache Cassandra, Cassandra and Apache are either registered trademarks or trademarks of the [Apache Software Foundation](http://www.apache.org/) in the United States and/or other countries. diff --git a/docs/docs/integrations/providers/oci.mdx b/docs/docs/integrations/providers/oci.mdx new file mode 100644 index 0000000000000..e0b3570028f4f --- /dev/null +++ b/docs/docs/integrations/providers/oci.mdx @@ -0,0 +1,58 @@ +# Oracle Cloud Infrastructure (OCI) + +The `LangChain` integrations related to [Oracle Cloud Infrastructure](https://www.oracle.com/artificial-intelligence/). + +## LLMs + +### OCI Generative AI +> Oracle Cloud Infrastructure (OCI) [Generative AI](https://docs.oracle.com/en-us/iaas/Content/generative-ai/home.htm) is a fully managed service that provides a set of state-of-the-art, +> customizable large language models (LLMs) that cover a wide range of use cases, and which are available through a single API. +> Using the OCI Generative AI service you can access ready-to-use pretrained models, or create and host your own fine-tuned +> custom models based on your own data on dedicated AI clusters. + +To use, you should have the latest `oci` python SDK installed. + +```bash +pip install -U oci +``` + +See [usage examples](/docs/integrations/llms/oci_generative_ai). + +```python +from langchain_community.llms import OCIGenAI +``` + +### OCI Data Science Model Deployment Endpoint + +> [OCI Data Science](https://docs.oracle.com/en-us/iaas/data-science/using/home.htm) is a +> fully managed and serverless platform for data science teams. Using the OCI Data Science +> platform you can build, train, and manage machine learning models, and then deploy them +> as an OCI Model Deployment Endpoint using the +> [OCI Data Science Model Deployment Service](https://docs.oracle.com/en-us/iaas/data-science/using/model-dep-about.htm). 
+ +If you deployed a LLM with the VLLM or TGI framework, you can use the +`OCIModelDeploymentVLLM` or `OCIModelDeploymentTGI` classes to interact with it. + +To use, you should have the latest `oracle-ads` python SDK installed. + +```bash +pip install -U oracle-ads +``` + +See [usage examples](/docs/integrations/llms/oci_model_deployment_endpoint). + +```python +from langchain_community.llms import OCIModelDeploymentVLLM + +from langchain_community.llms import OCIModelDeploymentTGI +``` + +## Text Embedding Models + +### OCI Generative AI + +See [usage examples](/docs/integrations/text_embedding/oci_generative_ai). + +```python +from langchain_community.embeddings import OCIGenAIEmbeddings +``` \ No newline at end of file diff --git a/docs/docs/integrations/providers/optimum_intel.mdx b/docs/docs/integrations/providers/optimum_intel.mdx new file mode 100644 index 0000000000000..9d112a9d2a9ea --- /dev/null +++ b/docs/docs/integrations/providers/optimum_intel.mdx @@ -0,0 +1,26 @@ +# Optimum-intel + +All functionality related to the [optimum-intel](https://github.com/huggingface/optimum-intel.git) and [IPEX](https://github.com/intel/intel-extension-for-pytorch). + +## Installation + +Install using optimum-intel and ipex using: + +```bash +pip install optimum[neural-compressor] +pip install intel_extension_for_pytorch +``` + +Please follow the installation instructions as specified below: + +* Install optimum-intel as shown [here](https://github.com/huggingface/optimum-intel). +* Install IPEX as shown [here](https://intel.github.io/intel-extension-for-pytorch/index.html#installation?platform=cpu&version=v2.2.0%2Bcpu). + +## Embedding Models + +See a [usage example](/docs/integrations/text_embedding/optimum_intel). +We also offer a full tutorial notebook "rag_with_quantized_embeddings.ipynb" for using the embedder in a RAG pipeline in the cookbook dir. + +```python +from langchain_community.embeddings import QuantizedBiEncoderEmbeddings +``` \ No newline at end of file diff --git a/docs/docs/integrations/retrievers/flashrank-reranker.ipynb b/docs/docs/integrations/retrievers/flashrank-reranker.ipynb new file mode 100644 index 0000000000000..7f53ed00a687f --- /dev/null +++ b/docs/docs/integrations/retrievers/flashrank-reranker.ipynb @@ -0,0 +1,475 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "collapsed": false + }, + "source": [ + "# Flashrank Reranker\n", + "\n", + "This notebook shows how to use [flashrank](https://github.com/PrithivirajDamodaran/FlashRank) for document compression and retrieval." 
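Before the step-by-step walkthrough, here is the pattern in condensed form: wrap any base retriever in a `ContextualCompressionRetriever` that uses `FlashrankRerank` as its document compressor. The snippet below is only a preview against a toy corpus (the sections that follow build the same thing over the State of the Union text) and assumes `flashrank`, `faiss-cpu`, and an OpenAI API key are available.

```python
from langchain.retrievers import ContextualCompressionRetriever
from langchain.retrievers.document_compressors import FlashrankRerank
from langchain_community.vectorstores import FAISS
from langchain_openai import OpenAIEmbeddings

# A toy corpus; the walkthrough below uses the State of the Union speech instead.
texts = [
    "The president nominated Ketanji Brown Jackson to the Supreme Court.",
    "Inflation and gas prices were a major theme of the speech.",
    "The speech also covered infrastructure and manufacturing jobs.",
]
base_retriever = FAISS.from_texts(texts, OpenAIEmbeddings()).as_retriever(
    search_kwargs={"k": 3}
)

# FlashrankRerank rescores whatever the base retriever returns and keeps the top results.
compression_retriever = ContextualCompressionRetriever(
    base_compressor=FlashrankRerank(),
    base_retriever=base_retriever,
)
docs = compression_retriever.get_relevant_documents("Who was nominated to the Supreme Court?")
print([d.page_content for d in docs])
```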
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false, + "pycharm": { + "is_executing": true + } + }, + "outputs": [], + "source": [ + "% pip install --upgrade --quiet flashrank\n", + "% pip install --upgrade --quiet faiss\n", + "\n", + "# OR (depending on Python version)\n", + "\n", + "% pip install --upgrade --quiet faiss_cpu" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# Helper function for printing docs\n", + "\n", + "\n", + "def pretty_print_docs(docs):\n", + " print(\n", + " f\"\\n{'-' * 100}\\n\".join(\n", + " [f\"Document {i+1}:\\n\\n\" + d.page_content for i, d in enumerate(docs)]\n", + " )\n", + " )" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": false + }, + "source": [ + "## Set up the base vector store retriever\n", + "Let's start by initializing a simple vector store retriever and storing the 2023 State of the Union speech (in chunks). We can set up the retriever to retrieve a high number (20) of docs." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "\n", + "os.environ[\"OPENAI_API_KEY\"] = getpass.getpass()" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Document 1:\n", + "\n", + "One of the most serious constitutional responsibilities a President has is nominating someone to serve on the United States Supreme Court. \n", + "\n", + "And I did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji Brown Jackson. One of our nationโ€™s top legal minds, who will continue Justice Breyerโ€™s legacy of excellence.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 2:\n", + "\n", + "As I said last year, especially to our younger transgender Americans, I will always have your back as your President, so you can be yourself and reach your God-given potential. \n", + "\n", + "While it often appears that we never agree, that isnโ€™t true. I signed 80 bipartisan bills into law last year. From preventing government shutdowns to protecting Asian-Americans from still-too-common hate crimes to reforming military justice.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 3:\n", + "\n", + "A former top litigator in private practice. A former federal public defender. And from a family of public school educators and police officers. A consensus builder. Since sheโ€™s been nominated, sheโ€™s received a broad range of supportโ€”from the Fraternal Order of Police to former judges appointed by Democrats and Republicans. \n", + "\n", + "And if we are to advance liberty and justice, we need to secure the Border and fix the immigration system.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 4:\n", + "\n", + "He met the Ukrainian people. \n", + "\n", + "From President Zelenskyy to every Ukrainian, their fearlessness, their courage, their determination, inspires the world. \n", + "\n", + "Groups of citizens blocking tanks with their bodies. Everyone from students to retirees teachers turned soldiers defending their homeland. 
\n", + "\n", + "In this struggle as President Zelenskyy said in his speech to the European Parliament โ€œLight will win over darkness.โ€ The Ukrainian Ambassador to the United States is here tonight.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 5:\n", + "\n", + "But that trickle-down theory led to weaker economic growth, lower wages, bigger deficits, and the widest gap between those at the top and everyone else in nearly a century. \n", + "\n", + "Vice President Harris and I ran for office with a new economic vision for America. \n", + "\n", + "Invest in America. Educate Americans. Grow the workforce. Build the economy from the bottom up \n", + "and the middle out, not from the top down.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 6:\n", + "\n", + "And tonight, Iโ€™m announcing that the Justice Department will name a chief prosecutor for pandemic fraud. \n", + "\n", + "By the end of this year, the deficit will be down to less than half what it was before I took office. \n", + "\n", + "The only president ever to cut the deficit by more than one trillion dollars in a single year. \n", + "\n", + "Lowering your costs also means demanding more competition. \n", + "\n", + "Iโ€™m a capitalist, but capitalism without competition isnโ€™t capitalism. \n", + "\n", + "Itโ€™s exploitationโ€”and it drives up prices.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 7:\n", + "\n", + "I spoke with their families and told them that we are forever in debt for their sacrifice, and we will carry on their mission to restore the trust and safety every community deserves. \n", + "\n", + "Iโ€™ve worked on these issues a long time. \n", + "\n", + "I know what works: Investing in crime prevention and community police officers whoโ€™ll walk the beat, whoโ€™ll know the neighborhood, and who can restore trust and safety. \n", + "\n", + "So letโ€™s not abandon our streets. Or choose between safety and equal justice.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 8:\n", + "\n", + "As Iโ€™ve told Xi Jinping, it is never a good bet to bet against the American people. \n", + "\n", + "Weโ€™ll create good jobs for millions of Americans, modernizing roads, airports, ports, and waterways all across America. \n", + "\n", + "And weโ€™ll do it all to withstand the devastating effects of the climate crisis and promote environmental justice.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 9:\n", + "\n", + "Madam Speaker, Madam Vice President, our First Lady and Second Gentleman. Members of Congress and the Cabinet. Justices of the Supreme Court. My fellow Americans. \n", + "\n", + "Last year COVID-19 kept us apart. This year we are finally together again. \n", + "\n", + "Tonight, we meet as Democrats Republicans and Independents. But most importantly as Americans. \n", + "\n", + "With a duty to one another to the American people to the Constitution. 
\n", + "\n", + "And with an unwavering resolve that freedom will always triumph over tyranny.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 10:\n", + "\n", + "As Ohio Senator Sherrod Brown says, โ€œItโ€™s time to bury the label โ€œRust Belt.โ€ \n", + "\n", + "Itโ€™s time. \n", + "\n", + "But with all the bright spots in our economy, record job growth and higher wages, too many families are struggling to keep up with the bills. \n", + "\n", + "Inflation is robbing them of the gains they might otherwise feel. \n", + "\n", + "I get it. Thatโ€™s why my top priority is getting prices under control.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 11:\n", + "\n", + "Iโ€™m also calling on Congress: pass a law to make sure veterans devastated by toxic exposures in Iraq and Afghanistan finally get the benefits and comprehensive health care they deserve. \n", + "\n", + "And fourth, letโ€™s end cancer as we know it. \n", + "\n", + "This is personal to me and Jill, to Kamala, and to so many of you. \n", + "\n", + "Cancer is the #2 cause of death in Americaโ€“second only to heart disease.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 12:\n", + "\n", + "Headaches. Numbness. Dizziness. \n", + "\n", + "A cancer that would put them in a flag-draped coffin. \n", + "\n", + "I know. \n", + "\n", + "One of those soldiers was my son Major Beau Biden. \n", + "\n", + "We donโ€™t know for sure if a burn pit was the cause of his brain cancer, or the diseases of so many of our troops. \n", + "\n", + "But Iโ€™m committed to finding out everything we can. \n", + "\n", + "Committed to military families like Danielle Robinson from Ohio. \n", + "\n", + "The widow of Sergeant First Class Heath Robinson.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 13:\n", + "\n", + "He will never extinguish their love of freedom. He will never weaken the resolve of the free world. \n", + "\n", + "We meet tonight in an America that has lived through two of the hardest years this nation has ever faced. \n", + "\n", + "The pandemic has been punishing. \n", + "\n", + "And so many families are living paycheck to paycheck, struggling to keep up with the rising cost of food, gas, housing, and so much more. \n", + "\n", + "I understand.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 14:\n", + "\n", + "When we invest in our workers, when we build the economy from the bottom up and the middle out together, we can do something we havenโ€™t done in a long time: build a better America. \n", + "\n", + "For more than two years, COVID-19 has impacted every decision in our lives and the life of the nation. \n", + "\n", + "And I know youโ€™re tired, frustrated, and exhausted. \n", + "\n", + "But I also know this.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 15:\n", + "\n", + "My plan to fight inflation will lower your costs and lower the deficit. \n", + "\n", + "17 Nobel laureates in economics say my plan will ease long-term inflationary pressures. Top business leaders and most Americans support my plan. And hereโ€™s the plan: \n", + "\n", + "First โ€“ cut the cost of prescription drugs. 
Just look at insulin. One in ten Americans has diabetes. In Virginia, I met a 13-year-old boy named Joshua Davis.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 16:\n", + "\n", + "And soon, weโ€™ll strengthen the Violence Against Women Act that I first wrote three decades ago. It is important for us to show the nation that we can come together and do big things. \n", + "\n", + "So tonight Iโ€™m offering a Unity Agenda for the Nation. Four big things we can do together. \n", + "\n", + "First, beat the opioid epidemic. \n", + "\n", + "There is so much we can do. Increase funding for prevention, treatment, harm reduction, and recovery.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 17:\n", + "\n", + "So letโ€™s not abandon our streets. Or choose between safety and equal justice. \n", + "\n", + "Letโ€™s come together to protect our communities, restore trust, and hold law enforcement accountable. \n", + "\n", + "Thatโ€™s why the Justice Department required body cameras, banned chokeholds, and restricted no-knock warrants for its officers.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 18:\n", + "\n", + "My plan will not only lower costs to give families a fair shot, it will lower the deficit. \n", + "\n", + "The previous Administration not only ballooned the deficit with tax cuts for the very wealthy and corporations, it undermined the watchdogs whose job was to keep pandemic relief funds from being wasted. \n", + "\n", + "But in my administration, the watchdogs have been welcomed back. \n", + "\n", + "Weโ€™re going after the criminals who stole billions in relief money meant for small businesses and millions of Americans.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 19:\n", + "\n", + "I understand. \n", + "\n", + "I remember when my Dad had to leave our home in Scranton, Pennsylvania to find work. I grew up in a family where if the price of food went up, you felt it. \n", + "\n", + "Thatโ€™s why one of the first things I did as President was fight to pass the American Rescue Plan. \n", + "\n", + "Because people were hurting. We needed to act, and we did. \n", + "\n", + "Few pieces of legislation have done more in a critical moment in our history to lift us out of crisis.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 20:\n", + "\n", + "And we will, as one people. \n", + "\n", + "One America. \n", + "\n", + "The United States of America. \n", + "\n", + "May God bless you all. 
May God protect our troops.\n" + ] + } + ], + "source": [ + "from langchain.text_splitter import RecursiveCharacterTextSplitter\n", + "from langchain_community.document_loaders import TextLoader\n", + "from langchain_community.vectorstores import FAISS\n", + "from langchain_openai import OpenAIEmbeddings\n", + "\n", + "documents = TextLoader(\n", + " \"../../modules/state_of_the_union.txt\",\n", + ").load()\n", + "text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=100)\n", + "texts = text_splitter.split_documents(documents)\n", + "\n", + "embedding = OpenAIEmbeddings(model=\"text-embedding-ada-002\")\n", + "retriever = FAISS.from_documents(texts, embedding).as_retriever(search_kwargs={\"k\": 20})\n", + "\n", + "query = \"What did the president say about Ketanji Brown Jackson\"\n", + "docs = retriever.get_relevant_documents(query)\n", + "pretty_print_docs(docs)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": false + }, + "source": [ + "## Doing reranking with FlashRank\n", + "Now let's wrap our base retriever with a `ContextualCompressionRetriever`, using `FlashrankRerank` as a compressor." + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[0, 3, 7]\n" + ] + } + ], + "source": [ + "from langchain.retrievers import ContextualCompressionRetriever\n", + "from langchain.retrievers.document_compressors import FlashrankRerank\n", + "from langchain_openai import ChatOpenAI\n", + "\n", + "llm = ChatOpenAI(temperature=0)\n", + "\n", + "compressor = FlashrankRerank()\n", + "compression_retriever = ContextualCompressionRetriever(\n", + " base_compressor=compressor, base_retriever=retriever\n", + ")\n", + "\n", + "compressed_docs = compression_retriever.get_relevant_documents(\n", + " \"What did the president say about Ketanji Jackson Brown\"\n", + ")\n", + "print([doc.metadata[\"id\"] for doc in compressed_docs])" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": false + }, + "source": [ + "After reranking, the top 3 documents are different from the top 3 documents retrieved by the base retriever." + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Document 1:\n", + "\n", + "One of the most serious constitutional responsibilities a President has is nominating someone to serve on the United States Supreme Court. \n", + "\n", + "And I did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji Brown Jackson. One of our nationโ€™s top legal minds, who will continue Justice Breyerโ€™s legacy of excellence.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 2:\n", + "\n", + "And tonight, Iโ€™m announcing that the Justice Department will name a chief prosecutor for pandemic fraud. \n", + "\n", + "By the end of this year, the deficit will be down to less than half what it was before I took office. \n", + "\n", + "The only president ever to cut the deficit by more than one trillion dollars in a single year. \n", + "\n", + "Lowering your costs also means demanding more competition. \n", + "\n", + "Iโ€™m a capitalist, but capitalism without competition isnโ€™t capitalism. 
\n", + "\n", + "Itโ€™s exploitationโ€”and it drives up prices.\n", + "----------------------------------------------------------------------------------------------------\n", + "Document 3:\n", + "\n", + "As Ohio Senator Sherrod Brown says, โ€œItโ€™s time to bury the label โ€œRust Belt.โ€ \n", + "\n", + "Itโ€™s time. \n", + "\n", + "But with all the bright spots in our economy, record job growth and higher wages, too many families are struggling to keep up with the bills. \n", + "\n", + "Inflation is robbing them of the gains they might otherwise feel. \n", + "\n", + "I get it. Thatโ€™s why my top priority is getting prices under control.\n" + ] + } + ], + "source": [ + "pretty_print_docs(compressed_docs)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "collapsed": false + }, + "source": [ + "## QA reranking with FlashRank" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from langchain.chains import RetrievalQA\n", + "\n", + "chain = RetrievalQA.from_chain_type(llm=llm, retriever=compression_retriever)" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": "{'query': 'What did the president say about Ketanji Brown Jackson',\n 'result': \"The President said that Ketanji Brown Jackson is one of our nation's top legal minds and will continue Justice Breyer's legacy of excellence.\"}" + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "chain.invoke(query)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 2 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython2", + "version": "2.7.6" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/docs/docs/integrations/retrievers/tf_idf.ipynb b/docs/docs/integrations/retrievers/tf_idf.ipynb index 3d1d97b277c3c..9576f8b0dfe8e 100644 --- a/docs/docs/integrations/retrievers/tf_idf.ipynb +++ b/docs/docs/integrations/retrievers/tf_idf.ipynb @@ -33,7 +33,7 @@ }, "outputs": [], "source": [ - "from langchain.retrievers import TFIDFRetriever" + "from langchain_community.retrievers import TFIDFRetriever" ] }, { diff --git a/docs/docs/integrations/text_embedding/ai21.ipynb b/docs/docs/integrations/text_embedding/ai21.ipynb new file mode 100644 index 0000000000000..c44e55f2b497d --- /dev/null +++ b/docs/docs/integrations/text_embedding/ai21.ipynb @@ -0,0 +1,138 @@ +{ + "cells": [ + { + "cell_type": "raw", + "id": "c2923bd1", + "metadata": {}, + "source": [ + "---\n", + "sidebar_label: AI21 Labs\n", + "---" + ] + }, + { + "cell_type": "markdown", + "id": "cc3c6ef6bbd57ce9", + "metadata": { + "collapsed": false + }, + "source": [ + "# AI21Embeddings\n", + "\n", + "This notebook covers how to get started with AI21 embedding models.\n", + "\n", + "## Installation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4c3bef91", + "metadata": {}, + "outputs": [], + "source": [ + "!pip install -qU langchain-ai21" + ] + }, + { + "cell_type": "markdown", + "id": "2b4f3e15", + "metadata": {}, + "source": [ + "## Environment Setup\n", + "\n", + "We'll need to get a [AI21 API key](https://docs.ai21.com/) and set the `AI21_API_KEY` environment variable:\n" + ] 
+ }, + { + "cell_type": "code", + "execution_count": null, + "id": "62e0dbc3", + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "import os\n", + "from getpass import getpass\n", + "\n", + "os.environ[\"AI21_API_KEY\"] = getpass()" + ] + }, + { + "cell_type": "markdown", + "id": "74ef9d8b40a1319e", + "metadata": { + "collapsed": false + }, + "source": [ + "## Usage" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "12fcfb4b", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_ai21 import AI21Embeddings\n", + "\n", + "embeddings = AI21Embeddings()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1f2e6104", + "metadata": {}, + "outputs": [], + "source": [ + "embeddings.embed_query(\"My query to look up\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a3465d7e63bfb3d1", + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "embeddings.embed_documents(\n", + " [\"This is a content of the document\", \"This is another document\"]\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9d60af6d", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/integrations/text_embedding/google_generative_ai.ipynb b/docs/docs/integrations/text_embedding/google_generative_ai.ipynb index 7cac7e42b8f05..7afdf8b34fd93 100644 --- a/docs/docs/integrations/text_embedding/google_generative_ai.ipynb +++ b/docs/docs/integrations/text_embedding/google_generative_ai.ipynb @@ -200,7 +200,7 @@ "id": "2e7857e5", "metadata": {}, "source": [ - "## Additional Configuraation\n", + "## Additional Configuration\n", "\n", "You can pass the following parameters to ChatGoogleGenerativeAI in order to customize the SDK's behavior:\n", "\n", diff --git a/docs/docs/integrations/text_embedding/nemo.ipynb b/docs/docs/integrations/text_embedding/nemo.ipynb new file mode 100644 index 0000000000000..4868985e882a2 --- /dev/null +++ b/docs/docs/integrations/text_embedding/nemo.ipynb @@ -0,0 +1,121 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "abede47c-6a58-40c3-b7ef-10966a4fc085", + "metadata": {}, + "source": [ + "# NVIDIA NeMo embeddings" + ] + }, + { + "cell_type": "markdown", + "id": "38f3d4ce-b36a-48c6-88b0-5970c26bb146", + "metadata": {}, + "source": [ + "Connect to NVIDIA's embedding service using the `NeMoEmbeddings` class.\n", + "\n", + "The NeMo Retriever Embedding Microservice (NREM) brings the power of state-of-the-art text embedding to your applications, providing unmatched natural language processing and understanding capabilities. Whether you're developing semantic search, Retrieval Augmented Generation (RAG) pipelinesโ€”or any application that needs to use text embeddingsโ€”NREM has you covered. Built on the NVIDIA software platform incorporating CUDA, TensorRT, and Triton, NREM brings state of the art GPU accelerated Text Embedding model serving.\n", + "\n", + "NREM uses NVIDIA's TensorRT built on top of the Triton Inference Server for optimized inference of text embedding models." 
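The setup below configures a `batch_size` and then embeds a single query string. Because `NeMoEmbeddings` implements LangChain's standard `Embeddings` interface, the same object can also embed a list of documents with `embed_documents`. A small sketch, reusing the connection parameters defined in the setup cells that follow (the endpoint and model name are whatever your NREM deployment exposes):

```python
from langchain_community.embeddings import NeMoEmbeddings

# Same parameters as in the setup section below; adjust to your NREM deployment.
embedding_model = NeMoEmbeddings(
    batch_size=16,
    model="NV-Embed-QA-003",
    api_endpoint_url="http://localhost:8080/v1/embeddings",
)

# embed_documents returns one embedding (a list of floats) per input text;
# batch_size presumably controls how many texts are sent per request.
vectors = embedding_model.embed_documents(["This is a test.", "This is another test."])
print(len(vectors), len(vectors[0]))
```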
+ ] + }, + { + "cell_type": "markdown", + "id": "f5ab6ea1-d074-4f36-ae45-50312a6a82b9", + "metadata": {}, + "source": [ + "## Imports" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "32deab16-530d-455c-b40c-914db048cb05", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_community.embeddings import NeMoEmbeddings" + ] + }, + { + "cell_type": "markdown", + "id": "de40023c-3391-474d-96cf-fbfb2311e9d7", + "metadata": {}, + "source": [ + "## Setup" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "37177018-47f4-48be-8575-83ce5c9a5447", + "metadata": {}, + "outputs": [], + "source": [ + "batch_size = 16\n", + "model = \"NV-Embed-QA-003\"\n", + "api_endpoint_url = \"http://localhost:8080/v1/embeddings\"" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "id": "08161ed2-8ba3-4226-a387-15c348f8c343", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Checking if endpoint is live: http://localhost:8080/v1/embeddings\n" + ] + } + ], + "source": [ + "embedding_model = NeMoEmbeddings(\n", + " batch_size=batch_size, model=model, api_endpoint_url=api_endpoint_url\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c69070c3-fe2d-4ff7-be4a-73304e2c4f3e", + "metadata": {}, + "outputs": [], + "source": [ + "embedding_model.embed_query(\"This is a test.\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5d1d8852-5298-40b5-89c4-5a91ccfc95e5", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.1" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/integrations/text_embedding/nomic.ipynb b/docs/docs/integrations/text_embedding/nomic.ipynb index 6f6b11d4aa51f..6f731668c30fb 100644 --- a/docs/docs/integrations/text_embedding/nomic.ipynb +++ b/docs/docs/integrations/text_embedding/nomic.ipynb @@ -58,7 +58,7 @@ "source": [ "from langchain_nomic.embeddings import NomicEmbeddings\n", "\n", - "embeddings = NomicEmbeddings(model=\"nomic-embed-text-v1\")" + "embeddings = NomicEmbeddings(model=\"nomic-embed-text-v1.5\")" ] }, { @@ -106,6 +106,28 @@ " [\"This is a content of the document\", \"This is another document\"]\n", ")" ] + }, + { + "cell_type": "markdown", + "id": "7a331dc3", + "metadata": {}, + "source": [ + "### Custom Dimensionality\n", + "\n", + "Nomic's `nomic-embed-text-v1.5` model was [trained with Matryoshka learning](https://blog.nomic.ai/posts/nomic-embed-matryoshka) to enable variable-length embeddings with a single model. This means that you can specify the dimensionality of the embeddings at inference time. The model supports dimensionality from 64 to 768." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "993f65c8", + "metadata": {}, + "outputs": [], + "source": [ + "embeddings = NomicEmbeddings(model=\"nomic-embed-text-v1.5\", dimensionality=256)\n", + "\n", + "embeddings.embed_query(\"My query to look up\")" + ] } ], "metadata": { diff --git a/docs/docs/integrations/text_embedding/oci_generative_ai.ipynb b/docs/docs/integrations/text_embedding/oci_generative_ai.ipynb new file mode 100755 index 0000000000000..ae935e4d354dc --- /dev/null +++ b/docs/docs/integrations/text_embedding/oci_generative_ai.ipynb @@ -0,0 +1,140 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Oracle Cloud Infrastructure Generative AI" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Oracle Cloud Infrastructure (OCI) Generative AI is a fully managed service that provides a set of state-of-the-art, customizable large language models (LLMs) that cover a wide range of use cases and are available through a single API.\n", + "Using the OCI Generative AI service you can access ready-to-use pretrained models, or create and host your own fine-tuned custom models based on your own data on dedicated AI clusters. Detailed documentation of the service and API is available __[here](https://docs.oracle.com/en-us/iaas/Content/generative-ai/home.htm)__ and __[here](https://docs.oracle.com/en-us/iaas/api/#/en/generative-ai/20231130/)__.\n", + "\n", + "This notebook explains how to use OCI's Generative AI models with LangChain." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Prerequisite\n", + "We will need to install the OCI SDK" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!pip install -U oci" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### OCI Generative AI API endpoint \n", + "https://inference.generativeai.us-chicago-1.oci.oraclecloud.com" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Authentication\n", + "The authentication methods supported for this LangChain integration are:\n", + "\n", + "1. API Key\n", + "2. Session token\n", + "3. Instance principal\n", + "4. 
Resource principal \n", + "\n", + "These follow the standard SDK authentication methods detailed __[here](https://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdk_authentication_methods.htm)__.\n", + " " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Usage" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_community.embeddings import OCIGenAIEmbeddings\n", + "\n", + "# use default authN method API-key\n", + "embeddings = OCIGenAIEmbeddings(\n", + " model_id=\"MY_EMBEDDING_MODEL\",\n", + " service_endpoint=\"https://inference.generativeai.us-chicago-1.oci.oraclecloud.com\",\n", + " compartment_id=\"MY_OCID\",\n", + ")\n", + "\n", + "\n", + "query = \"This is a query in English.\"\n", + "response = embeddings.embed_query(query)\n", + "print(response)\n", + "\n", + "documents = [\"This is a sample document\", \"and here is another one\"]\n", + "response = embeddings.embed_documents(documents)\n", + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Use Session Token to authN\n", + "embeddings = OCIGenAIEmbeddings(\n", + " model_id=\"MY_EMBEDDING_MODEL\",\n", + " service_endpoint=\"https://inference.generativeai.us-chicago-1.oci.oraclecloud.com\",\n", + " compartment_id=\"MY_OCID\",\n", + " auth_type=\"SECURITY_TOKEN\",\n", + " auth_profile=\"MY_PROFILE\", # replace with your profile name\n", + ")\n", + "\n", + "\n", + "query = \"This is a sample query\"\n", + "response = embeddings.embed_query(query)\n", + "print(response)\n", + "\n", + "documents = [\"This is a sample document\", \"and here is another one\"]\n", + "response = embeddings.embed_documents(documents)\n", + "print(response)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "oci_langchain", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.9" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/docs/integrations/text_embedding/optimum_intel.ipynb b/docs/docs/integrations/text_embedding/optimum_intel.ipynb new file mode 100644 index 0000000000000..bb4df8fcd73be --- /dev/null +++ b/docs/docs/integrations/text_embedding/optimum_intel.ipynb @@ -0,0 +1,201 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "ae6f9d9d-fe44-489c-9661-dac69683dcd2", + "metadata": {}, + "source": [ + "# Embedding Documents using Optimized and Quantized Embedders\n", + "\n", + "Embedding all documents using Quantized Embedders.\n", + "\n", + "The embedders are based on optimized models, created by using [optimum-intel](https://github.com/huggingface/optimum-intel.git) and [IPEX](https://github.com/intel/intel-extension-for-pytorch).\n", + "\n", + "Example text is based on [SBERT](https://www.sbert.net/docs/pretrained_cross-encoders.html)."
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "b9d1a3bb-83b1-4029-ad8d-411db1fba034", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "loading configuration file inc_config.json from cache at \n", + "INCConfig {\n", + " \"distillation\": {},\n", + " \"neural_compressor_version\": \"2.4.1\",\n", + " \"optimum_version\": \"1.16.2\",\n", + " \"pruning\": {},\n", + " \"quantization\": {\n", + " \"dataset_num_samples\": 50,\n", + " \"is_static\": true\n", + " },\n", + " \"save_onnx_model\": false,\n", + " \"torch_version\": \"2.2.0\",\n", + " \"transformers_version\": \"4.37.2\"\n", + "}\n", + "\n", + "Using `INCModel` to load a TorchScript model will be deprecated in v1.15.0, to load your model please use `IPEXModel` instead.\n" + ] + } + ], + "source": [ + "from langchain_community.embeddings import QuantizedBiEncoderEmbeddings\n", + "\n", + "model_name = \"Intel/bge-small-en-v1.5-rag-int8-static\"\n", + "encode_kwargs = {\"normalize_embeddings\": True} # set True to compute cosine similarity\n", + "\n", + "model = QuantizedBiEncoderEmbeddings(\n", + " model_name=model_name,\n", + " encode_kwargs=encode_kwargs,\n", + " query_instruction=\"Represent this sentence for searching relevant passages: \",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "34318164-7a6f-47b6-8690-3b1d71e1fcfc", + "metadata": {}, + "source": [ + "Let's ask a question and compare it to 2 documents. The first contains the answer to the question, and the second one does not. \n", + "\n", + "We can check which one better suits our query." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "55ff07ca-fb44-4dcf-b2d3-dde021a53983", + "metadata": {}, + "outputs": [], + "source": [ + "question = \"How many people live in Berlin?\"" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "aebef832-5534-440c-a4a8-4bf56ccd8ad4", + "metadata": {}, + "outputs": [], + "source": [ + "documents = [\n", + " \"Berlin had a population of 3,520,031 registered inhabitants in an area of 891.82 square kilometers.\",\n", + " \"Berlin is well known for its museums.\",\n", + "]" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "4eec7eda-0d9b-4488-a0e8-3eedd28ab0b1", + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Batches: 100%|██████████| 1/1 [00:00<00:00, 4.18it/s]\n" + ] + } + ], + "source": [ + "doc_vecs = model.embed_documents(documents)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "8e6dac72-5a0b-4421-9454-aa0a49b20c66", + "metadata": {}, + "outputs": [], + "source": [ + "query_vec = model.embed_query(question)" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "ec26eb7a-a259-4bb9-b9d8-9ff345a8c798", + "metadata": {}, + "outputs": [], + "source": [ + "import torch" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "9ca1ee83-2a6a-4f65-bc2f-3942a0c068c6", + "metadata": {}, + "outputs": [], + "source": [ + "doc_vecs_torch = torch.tensor(doc_vecs)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "4f6a1986-339e-443a-a2f6-ae3f3ad4266c", + "metadata": {}, + "outputs": [], + "source": [ + "query_vec_torch = torch.tensor(query_vec)" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "id": "2b49446e-1336-46b3-b9ef-af56b4870876", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "tensor([0.7980, 0.6529])" + ] + }, + "execution_count": 15, + "metadata": {}, 
"output_type": "execute_result" + } + ], + "source": [ + "query_vec_torch @ doc_vecs_torch.T" + ] + }, + { + "cell_type": "markdown", + "id": "6cc1ac2a-9641-408e-a373-736d121fc3c7", + "metadata": {}, + "source": [ + "We can see that indeed the first one ranks higher." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.18" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/integrations/toolkits/cogniswitch.ipynb b/docs/docs/integrations/toolkits/cogniswitch.ipynb new file mode 100644 index 0000000000000..836f425cf6055 --- /dev/null +++ b/docs/docs/integrations/toolkits/cogniswitch.ipynb @@ -0,0 +1,326 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "19062701", + "metadata": {}, + "source": [ + "## Cogniswitch Tools\n", + "\n", + "**Use CogniSwitch to build production ready applications that can consume, organize and retrieve knowledge flawlessly. Using the framework of your choice, in this case Langchain CogniSwitch helps alleviate the stress of decision making when it comes to, choosing the right storage and retrieval formats. It also eradicates reliability issues and hallucinations when it comes to responses that are generated. Get started by interacting with your knowledge in just two simple steps.**\n", + "\n", + "visit [https://www.cogniswitch.ai/developer to register](https://www.cogniswitch.ai/developer?utm_source=langchain&utm_medium=langchainbuild&utm_id=dev).\n\n", + "**Registration:** \n\n", + "- Signup with your email and verify your registration \n\n", + "- You will get a mail with a platform token and oauth token for using the services.\n\n\n", + "\n", + "**step 1: Instantiate the toolkit and get the tools:**\n\n", + "- Instantiate the cogniswitch toolkit with the cogniswitch token, openAI API key and OAuth token and get the tools. \n", + "\n", + "**step 2: Instantiate the agent with the tools and llm:**\n", + "- Instantiate the agent with the list of cogniswitch tools and the llm, into the agent executor.\n", + "\n", + "**step 3: CogniSwitch Store Tool:** \n", + "\n", + "***CogniSwitch knowledge source file tool***\n", + "- Use the agent to upload a file by giving the file path.(formats that are currently supported are .pdf, .docx, .doc, .txt, .html) \n", + "- The content from the file will be processed by the cogniswitch and stored in your knowledge store. \n", + "\n", + "***CogniSwitch knowledge source url tool***\n", + "- Use the agent to upload a URL. \n", + "- The content from the url will be processed by the cogniswitch and stored in your knowledge store. \n", + "\n", + "**step 4: CogniSwitch Status Tool:**\n", + "- Use the agent to know the status of the document uploaded with a document name.\n", + "- You can also check the status of document processing in cogniswitch console. \n", + "\n", + "**step 5: CogniSwitch Answer Tool:**\n", + "- Use the agent to ask your question.\n", + "- You will get the answer from your knowledge as the response. 
\n" + ] + }, + { + "cell_type": "markdown", + "id": "1435b193", + "metadata": {}, + "source": [ + "### Import necessary libraries" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "8d86323b", + "metadata": {}, + "outputs": [], + "source": [ + "import warnings\n", + "\n", + "warnings.filterwarnings(\"ignore\")\n", + "\n", + "import os\n", + "\n", + "from langchain.agents.agent_toolkits import create_conversational_retrieval_agent\n", + "from langchain.chat_models import ChatOpenAI\n", + "from langchain_community.agent_toolkits import CogniswitchToolkit" + ] + }, + { + "cell_type": "markdown", + "id": "6e6acf0e", + "metadata": {}, + "source": [ + "### Cogniswitch platform token, OAuth token and OpenAI API key" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "3d2dfc9f", + "metadata": {}, + "outputs": [], + "source": [ + "cs_token = \"Your CogniSwitch token\"\n", + "OAI_token = \"Your OpenAI API token\"\n", + "oauth_token = \"Your CogniSwitch authentication token\"\n", + "\n", + "os.environ[\"OPENAI_API_KEY\"] = OAI_token" + ] + }, + { + "cell_type": "markdown", + "id": "320e02fc", + "metadata": {}, + "source": [ + "### Instantiate the cogniswitch toolkit with the credentials" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "89f58167", + "metadata": {}, + "outputs": [], + "source": [ + "cogniswitch_toolkit = CogniswitchToolkit(\n", + " cs_token=cs_token, OAI_token=OAI_token, apiKey=oauth_token\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "16901682", + "metadata": {}, + "source": [ + "### Get the list of cogniswitch tools" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "288d07f6", + "metadata": {}, + "outputs": [], + "source": [ + "tool_lst = cogniswitch_toolkit.get_tools()" + ] + }, + { + "cell_type": "markdown", + "id": "4aae43a3", + "metadata": {}, + "source": [ + "### Instantiate the llm" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "4d67e5bb", + "metadata": {}, + "outputs": [], + "source": [ + "llm = ChatOpenAI(\n", + " temperature=0,\n", + " openai_api_key=OAI_token,\n", + " max_tokens=1500,\n", + " model_name=\"gpt-3.5-turbo-0613\",\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "04179282", + "metadata": {}, + "source": [ + "### Create a agent executor" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "2153e758", + "metadata": {}, + "outputs": [], + "source": [ + "agent_executor = create_conversational_retrieval_agent(llm, tool_lst, verbose=False)" + ] + }, + { + "cell_type": "markdown", + "id": "42c9890e", + "metadata": {}, + "source": [ + "### Invoke the agent to upload a URL" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "794b4fba", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The URL https://cogniswitch.ai/developer has been uploaded successfully. The status of the document is currently being processed. 
You will receive an email notification once the processing is complete.\n" + ] + } + ], + "source": [ + "response = agent_executor.invoke(\"upload this url https://cogniswitch.ai/developer\")\n", + "\n", + "print(response[\"output\"])" + ] + }, + { + "cell_type": "markdown", + "id": "544fe8f9", + "metadata": {}, + "source": [ + "### Invoke the agent to upload a File" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "fd0addfc", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The file example_file.txt has been uploaded successfully. The status of the document is currently being processed. You will receive an email notification once the processing is complete.\n" + ] + } + ], + "source": [ + "response = agent_executor.invoke(\"upload this file example_file.txt\")\n", + "\n", + "print(response[\"output\"])" + ] + }, + { + "cell_type": "markdown", + "id": "02827e1b", + "metadata": {}, + "source": [ + "### Invoke the agent to get the status of a document" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "f424e6c5", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The status of the document example_file.txt is as follows:\n", + "\n", + "- Created On: 2024-01-22T19:07:42.000+00:00\n", + "- Modified On: 2024-01-22T19:07:42.000+00:00\n", + "- Document Entry ID: 153\n", + "- Status: 0 (Processing)\n", + "- Original File Name: example_file.txt\n", + "- Saved File Name: 1705950460069example_file29393011.txt\n", + "\n", + "The document is currently being processed.\n" + ] + } + ], + "source": [ + "response = agent_executor.invoke(\"Tell me the status of this document example_file.txt\")\n", + "\n", + "print(response[\"output\"])" + ] + }, + { + "cell_type": "markdown", + "id": "0ba9aca9", + "metadata": {}, + "source": [ + "### Invoke the agent with query and get the answer" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "e73e963f", + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CogniSwitch can help develop GenAI applications in several ways:\n", + "\n", + "1. Knowledge Extraction: CogniSwitch can extract knowledge from various sources such as documents, websites, and databases. It can analyze and store data from these sources, making it easier to access and utilize the information for GenAI applications.\n", + "\n", + "2. Natural Language Processing: CogniSwitch has advanced natural language processing capabilities. It can understand and interpret human language, allowing GenAI applications to interact with users in a more conversational and intuitive manner.\n", + "\n", + "3. Sentiment Analysis: CogniSwitch can analyze the sentiment of text data, such as customer reviews or social media posts. This can be useful in developing GenAI applications that can understand and respond to the emotions and opinions of users.\n", + "\n", + "4. Knowledge Base Integration: CogniSwitch can integrate with existing knowledge bases or create new ones. This allows GenAI applications to access a vast amount of information and provide accurate and relevant responses to user queries.\n", + "\n", + "5. Document Analysis: CogniSwitch can analyze documents and extract key information such as entities, relationships, and concepts. 
This can be valuable in developing GenAI applications that can understand and process large amounts of textual data.\n", + "\n", + "Overall, CogniSwitch provides a range of AI-powered capabilities that can enhance the development of GenAI applications by enabling knowledge extraction, natural language processing, sentiment analysis, knowledge base integration, and document analysis.\n" + ] + } + ], + "source": [ + "response = agent_executor.invoke(\"How can cogniswitch help develop GenAI applications?\")\n", + "\n", + "print(response[\"output\"])" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "langchain_repo", + "language": "python", + "name": "langchain_repo" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.17" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/integrations/tools/filesystem.ipynb b/docs/docs/integrations/tools/filesystem.ipynb index c7599a0019d2c..56bf91b4c3673 100644 --- a/docs/docs/integrations/tools/filesystem.ipynb +++ b/docs/docs/integrations/tools/filesystem.ipynb @@ -56,13 +56,13 @@ { "data": { "text/plain": [ - "[CopyFileTool(name='copy_file', description='Create a copy of a file in a specified location', args_schema=, return_direct=False, verbose=False, callback_manager=, root_dir='/var/folders/gf/6rnp_mbx5914kx7qmmh7xzmw0000gn/T/tmpxb8c3aug'),\n", - " DeleteFileTool(name='file_delete', description='Delete a file', args_schema=, return_direct=False, verbose=False, callback_manager=, root_dir='/var/folders/gf/6rnp_mbx5914kx7qmmh7xzmw0000gn/T/tmpxb8c3aug'),\n", - " FileSearchTool(name='file_search', description='Recursively search for files in a subdirectory that match the regex pattern', args_schema=, return_direct=False, verbose=False, callback_manager=, root_dir='/var/folders/gf/6rnp_mbx5914kx7qmmh7xzmw0000gn/T/tmpxb8c3aug'),\n", - " MoveFileTool(name='move_file', description='Move or rename a file from one location to another', args_schema=, return_direct=False, verbose=False, callback_manager=, root_dir='/var/folders/gf/6rnp_mbx5914kx7qmmh7xzmw0000gn/T/tmpxb8c3aug'),\n", - " ReadFileTool(name='read_file', description='Read file from disk', args_schema=, return_direct=False, verbose=False, callback_manager=, root_dir='/var/folders/gf/6rnp_mbx5914kx7qmmh7xzmw0000gn/T/tmpxb8c3aug'),\n", - " WriteFileTool(name='write_file', description='Write file to disk', args_schema=, return_direct=False, verbose=False, callback_manager=, root_dir='/var/folders/gf/6rnp_mbx5914kx7qmmh7xzmw0000gn/T/tmpxb8c3aug'),\n", - " ListDirectoryTool(name='list_directory', description='List files and directories in a specified folder', args_schema=, return_direct=False, verbose=False, callback_manager=, root_dir='/var/folders/gf/6rnp_mbx5914kx7qmmh7xzmw0000gn/T/tmpxb8c3aug')]" + "[CopyFileTool(root_dir='/tmp/tmprdvsw3tg'),\n", + " DeleteFileTool(root_dir='/tmp/tmprdvsw3tg'),\n", + " FileSearchTool(root_dir='/tmp/tmprdvsw3tg'),\n", + " MoveFileTool(root_dir='/tmp/tmprdvsw3tg'),\n", + " ReadFileTool(root_dir='/tmp/tmprdvsw3tg'),\n", + " WriteFileTool(root_dir='/tmp/tmprdvsw3tg'),\n", + " ListDirectoryTool(root_dir='/tmp/tmprdvsw3tg')]" ] }, "execution_count": 2, @@ -96,9 +96,9 @@ { "data": { "text/plain": [ - "[ReadFileTool(name='read_file', description='Read file from disk', args_schema=, return_direct=False, verbose=False, callback_manager=, 
root_dir='/var/folders/gf/6rnp_mbx5914kx7qmmh7xzmw0000gn/T/tmpxb8c3aug'),\n", - " WriteFileTool(name='write_file', description='Write file to disk', args_schema=, return_direct=False, verbose=False, callback_manager=, root_dir='/var/folders/gf/6rnp_mbx5914kx7qmmh7xzmw0000gn/T/tmpxb8c3aug'),\n", - " ListDirectoryTool(name='list_directory', description='List files and directories in a specified folder', args_schema=, return_direct=False, verbose=False, callback_manager=, root_dir='/var/folders/gf/6rnp_mbx5914kx7qmmh7xzmw0000gn/T/tmpxb8c3aug')]" + "[ReadFileTool(root_dir='/tmp/tmprdvsw3tg'),\n", + " WriteFileTool(root_dir='/tmp/tmprdvsw3tg'),\n", + " ListDirectoryTool(root_dir='/tmp/tmprdvsw3tg')]" ] }, "execution_count": 3, @@ -132,7 +132,7 @@ ], "source": [ "read_tool, write_tool, list_tool = tools\n", - "write_tool.run({\"file_path\": \"example.txt\", \"text\": \"Hello World!\"})" + "write_tool.invoke({\"file_path\": \"example.txt\", \"text\": \"Hello World!\"})" ] }, { @@ -153,7 +153,7 @@ ], "source": [ "# List files in the working directory\n", - "list_tool.run({})" + "list_tool.invoke({})" ] }, { @@ -180,7 +180,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.9.18" } }, "nbformat": 4, diff --git a/docs/docs/integrations/tools/google_search.ipynb b/docs/docs/integrations/tools/google_search.ipynb index c8bb72cbbfc4f..b1302db06f18d 100644 --- a/docs/docs/integrations/tools/google_search.ipynb +++ b/docs/docs/integrations/tools/google_search.ipynb @@ -40,7 +40,7 @@ "search = GoogleSearchAPIWrapper()\n", "\n", "tool = Tool(\n", - " name=\"Google Search\",\n", + " name=\"google_search\",\n", " description=\"Search Google for recent results.\",\n", " func=search.run,\n", ")" diff --git a/docs/docs/integrations/tools/pubmed.ipynb b/docs/docs/integrations/tools/pubmed.ipynb index 7487eba02c061..52b5a5e84cd9f 100644 --- a/docs/docs/integrations/tools/pubmed.ipynb +++ b/docs/docs/integrations/tools/pubmed.ipynb @@ -14,7 +14,17 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, + "id": "3e90e50f", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install xmltodict" + ] + }, + { + "cell_type": "code", + "execution_count": 2, "id": "c80b9273", "metadata": {}, "outputs": [], @@ -24,7 +34,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "id": "f203c965", "metadata": {}, "outputs": [], @@ -34,23 +44,23 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "id": "baee7a2a", "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "'Published: 2023May31\\nTitle: Dermatology in the wake of an AI revolution: who gets a say?\\nSummary: \\n\\nPublished: 2023May30\\nTitle: What is ChatGPT and what do we do with it? Implications of the age of AI for nursing and midwifery practice and education: An editorial.\\nSummary: \\n\\nPublished: 2023Jun02\\nTitle: The Impact of ChatGPT on the Nursing Profession: Revolutionizing Patient Care and Education.\\nSummary: The nursing field has undergone notable changes over time and is projected to undergo further modifications in the future, owing to the advent of sophisticated technologies and growing healthcare needs. The advent of ChatGPT, an AI-powered language model, is expected to exert a significant influence on the nursing profession, specifically in the domains of patient care and instruction. 
The present article delves into the ramifications of ChatGPT within the nursing domain and accentuates its capacity and constraints to transform the discipline.'" + "'Published: 2024-02-10\\nTitle: circEPB41L2 blocks the progression and metastasis in non-small cell lung cancer by promoting TRIP12-triggered PTBP1 ubiquitylation.\\nCopyright Information: © 2024. The Author(s).\\nSummary::\\nThe metastasis of non-small cell lung cancer (NSCLC) is the leading death cause of NSCLC patients, which requires new biomarkers for precise diagnosis and treatment. Circular RNAs (circRNAs), the novel noncoding RNA, participate in the progression of various cancers as microRNA or protein sponges. We revealed the mechanism by which circEPB41L2 (hsa_circ_0077837) blocks the aerobic glycolysis, progression and metastasis of NSCLC through modulating protein metabolism of PTBP1 by the E3 ubiquitin ligase TRIP12. With ribosomal RNA-depleted RNA seq, 57 upregulated and 327 downregulated circRNAs were identified in LUAD tissues. circEPB41L2 was selected due to its dramatically reduced levels in NSCLC tissues and NSCLC cells. Interestingly, circEPB41L2 blocked glucose uptake, lactate production, NSCLC cell proliferation, migration and invasion in vitro and in vivo. Mechanistically, acting as a scaffold, circEPB41L2 bound to the RRM1 domain of the PTBP1 and the E3 ubiquitin ligase TRIP12 to promote TRIP12-mediated PTBP1 polyubiquitylation and degradation, which could be reversed by the HECT domain mutation of TRIP12 and circEPB41L2 depletion. As a result, circEPB41L2-induced PTBP1 inhibition led to PTBP1-induced PKM2 and Vimentin activation but PKM1 and E-cadherin inactivation. These findings highlight the circEPB41L2-dependent mechanism that modulates the \"Warburg Effect\" and EMT to inhibit NSCLC development and metastasis, offering an inhibitory target for NSCLC treatment.\\n\\nPublished: 2024-01-17\\nTitle: The safety of seasonal influenza vaccination among adults prescribed immune checkpoint inhibitors: A self-controlled case series study using administrative data.\\nCopyright Information: Copyright © 2024 The Author(s). Published by Elsevier Ltd.. All rights reserv'" ] }, - "execution_count": 3, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "tool.run(\"chatgpt\")" + "tool.invoke(\"What causes lung cancer?\")" ] }, { @@ -78,7 +88,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.11.7" } }, "nbformat": 4, diff --git a/docs/docs/integrations/vectorstores/apache_doris.ipynb b/docs/docs/integrations/vectorstores/apache_doris.ipynb new file mode 100644 index 0000000000000..1cee7b8f5370d --- /dev/null +++ b/docs/docs/integrations/vectorstores/apache_doris.ipynb @@ -0,0 +1,322 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "84180ad0-66cd-43e5-b0b8-2067a29e16ba", + "metadata": { + "collapsed": false + }, + "source": [ + "# Apache Doris\n", + "\n", + ">[Apache Doris](https://doris.apache.org/) is a modern data warehouse for real-time analytics.\n", + "It delivers lightning-fast analytics on real-time data at scale.\n", + "\n", + ">Usually `Apache Doris` is categorized as OLAP, and it has shown excellent performance in [ClickBench — a Benchmark For Analytical DBMS](https://benchmark.clickhouse.com/). Since it has a super-fast vectorized execution engine, it could also be used as a fast vectordb.\n", + "\n", + "Here we'll show how to use the Apache Doris Vector Store."
+ ] + }, + { + "cell_type": "markdown", + "id": "1685854f", + "metadata": {}, + "source": [ + "## Setup" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "311d44bb-4aca-4f3b-8f97-5e1f29238e40", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install --upgrade --quiet pymysql" + ] + }, + { + "cell_type": "markdown", + "id": "2c891bba", + "metadata": {}, + "source": [ + "Set `update_vectordb = False` at the beginning. If no docs have been updated, then we don't need to rebuild the embeddings of the docs." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f4e6ca20-79dd-482a-8f68-af9d7dd59c7c", + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "!pip install sqlalchemy\n", + "!pip install langchain" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "96f7c7a2-4811-4fdf-87f5-c60772f51fe1", + "metadata": { + "ExecuteTime": { + "end_time": "2024-02-14T12:54:01.392500Z", + "start_time": "2024-02-14T12:53:58.866615Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "from langchain.chains import RetrievalQA\n", + "from langchain.text_splitter import TokenTextSplitter\n", + "from langchain_community.document_loaders import (\n", + " DirectoryLoader,\n", + " UnstructuredMarkdownLoader,\n", + ")\n", + "from langchain_community.vectorstores.apache_doris import (\n", + " ApacheDoris,\n", + " ApacheDorisSettings,\n", + ")\n", + "from langchain_openai import OpenAI, OpenAIEmbeddings\n", + "\n", + "update_vectordb = False" + ] + }, + { + "cell_type": "markdown", + "id": "ee821c00", + "metadata": {}, + "source": [ + "## Load docs and split them into tokens" + ] + }, + { + "cell_type": "markdown", + "id": "34ba0cfd", + "metadata": {}, + "source": [ + "Load all markdown files under the `docs` directory.\n", + "\n", + "For Apache Doris documents, you can clone the repo from https://github.com/apache/doris, which contains a `docs` directory." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "799edf20-bcf4-4a65-bff7-b907f6bdba20", + "metadata": { + "ExecuteTime": { + "end_time": "2024-02-14T12:55:24.128917Z", + "start_time": "2024-02-14T12:55:19.463831Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "loader = DirectoryLoader(\n", + " \"./docs\", glob=\"**/*.md\", loader_cls=UnstructuredMarkdownLoader\n", + ")\n", + "documents = loader.load()" + ] + }, + { + "cell_type": "markdown", + "id": "b415fe2a", + "metadata": {}, + "source": [ + "Split docs into tokens, and set `update_vectordb = True` because there are new docs/tokens."
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "0dc5ba83-62ef-4f61-a443-e872f251e7da", + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "# load text splitter and split docs into snippets of text\n", + "text_splitter = TokenTextSplitter(chunk_size=400, chunk_overlap=50)\n", + "split_docs = text_splitter.split_documents(documents)\n", + "\n", + "# tell vectordb to update text embeddings\n", + "update_vectordb = True" + ] + }, + { + "cell_type": "markdown", + "id": "46966e25-9449-4a36-87d1-c0b25dce2994", + "metadata": { + "collapsed": false + }, + "source": [ + "split_docs[-20]" + ] + }, + { + "cell_type": "markdown", + "id": "99422e95-b407-43eb-aa68-9a62363fc82f", + "metadata": { + "collapsed": false + }, + "source": [ + "print(\"# docs = %d, # splits = %d\" % (len(documents), len(split_docs)))" + ] + }, + { + "cell_type": "markdown", + "id": "e780d77f-3f96-4690-a10f-f87566f7ccc6", + "metadata": { + "collapsed": false + }, + "source": [ + "## Create vectordb instance" + ] + }, + { + "cell_type": "markdown", + "id": "15702d9c", + "metadata": {}, + "source": [ + "### Use Apache Doris as vectordb" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "ced7dbe1", + "metadata": { + "ExecuteTime": { + "end_time": "2024-02-14T12:55:39.508287Z", + "start_time": "2024-02-14T12:55:39.500370Z" + } + }, + "outputs": [], + "source": [ + "def gen_apache_doris(update_vectordb, embeddings, settings):\n", + " if update_vectordb:\n", + " docsearch = ApacheDoris.from_documents(split_docs, embeddings, config=settings)\n", + " else:\n", + " docsearch = ApacheDoris(embeddings, settings)\n", + " return docsearch" + ] + }, + { + "cell_type": "markdown", + "id": "15d86fda", + "metadata": {}, + "source": [ + "## Convert tokens into embeddings and put them into vectordb" + ] + }, + { + "cell_type": "markdown", + "id": "ff1322ea", + "metadata": {}, + "source": [ + "Here we use Apache Doris as the vectordb; you can configure the Apache Doris instance via `ApacheDorisSettings`.\n", + "\n", + "Configuring an Apache Doris instance is pretty much like configuring a MySQL instance. You need to specify:\n", + "1. host/port\n", + "2. username(default: 'root')\n", + "3. password(default: '')\n", + "4. database(default: 'default')\n", + "5. 
table(default: 'langchain')" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "b34f8c31-c173-4902-8168-2e838ddfb9e9", + "metadata": { + "ExecuteTime": { + "end_time": "2024-02-14T12:56:02.671291Z", + "start_time": "2024-02-14T12:55:48.350294Z" + }, + "collapsed": false + }, + "outputs": [], + "source": [ + "import os\n", + "from getpass import getpass\n", + "\n", + "os.environ[\"OPENAI_API_KEY\"] = getpass()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c53ab3f2-9e34-4424-8b07-6292bde67e14", + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "update_vectordb = True\n", + "\n", + "embeddings = OpenAIEmbeddings()\n", + "\n", + "# configure Apache Doris settings(host/port/user/pw/db)\n", + "settings = ApacheDorisSettings()\n", + "settings.port = 9030\n", + "settings.host = \"172.30.34.130\"\n", + "settings.username = \"root\"\n", + "settings.password = \"\"\n", + "settings.database = \"langchain\"\n", + "docsearch = gen_apache_doris(update_vectordb, embeddings, settings)\n", + "\n", + "print(docsearch)\n", + "\n", + "update_vectordb = False" + ] + }, + { + "cell_type": "markdown", + "id": "bde66626", + "metadata": {}, + "source": [ + "## Build QA and ask question to it" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "84921814", + "metadata": {}, + "outputs": [], + "source": [ + "llm = OpenAI()\n", + "qa = RetrievalQA.from_chain_type(\n", + " llm=llm, chain_type=\"stuff\", retriever=docsearch.as_retriever()\n", + ")\n", + "query = \"what is apache doris\"\n", + "resp = qa.run(query)\n", + "print(resp)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.6" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/integrations/vectorstores/astradb.ipynb b/docs/docs/integrations/vectorstores/astradb.ipynb index c4b354225d88b..fe980dd137762 100644 --- a/docs/docs/integrations/vectorstores/astradb.ipynb +++ b/docs/docs/integrations/vectorstores/astradb.ipynb @@ -2,13 +2,27 @@ "cells": [ { "cell_type": "markdown", - "id": "d2d6ca14-fb7e-4172-9aa0-a3119a064b96", + "id": "66d0270a-b74f-4110-901e-7960b00297af", "metadata": {}, "source": [ "# Astra DB\n", "\n", - "This page provides a quickstart for using [Astra DB](https://docs.datastax.com/en/astra/home/astra.html) and [Apache Cassandraยฎ](https://cassandra.apache.org/) as a Vector Store.\n", - "\n", + "This page provides a quickstart for using [Astra DB](https://docs.datastax.com/en/astra/home/astra.html) as a Vector Store." + ] + }, + { + "cell_type": "markdown", + "id": "ab8cd64f-3bb2-4f16-a0a9-12d7b1789bf6", + "metadata": {}, + "source": [ + "> DataStax [Astra DB](https://docs.datastax.com/en/astra/home/astra.html) is a serverless vector-capable database built on Apache Cassandraยฎ and made conveniently available through an easy-to-use JSON API." 
+ ] + }, + { + "cell_type": "markdown", + "id": "d2d6ca14-fb7e-4172-9aa0-a3119a064b96", + "metadata": {}, + "source": [ "_Note: in addition to access to the database, an OpenAI API Key is required to run the full example._" ] }, @@ -17,7 +31,7 @@ "id": "bb9be7ce-8c70-4d46-9f11-71c42a36e928", "metadata": {}, "source": [ - "### Setup and general dependencies" + "## Setup and general dependencies" ] }, { @@ -25,7 +39,7 @@ "id": "dbe7c156-0413-47e3-9237-4769c4248869", "metadata": {}, "source": [ - "Use of the integration requires the following Python package." + "Use of the integration requires the corresponding Python package:" ] }, { @@ -35,7 +49,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install --upgrade --quiet \"astrapy>=0.5.3\"" + "pip install --upgrade langchain-astradb" ] }, { @@ -43,8 +57,25 @@ "id": "2453d83a-bc8f-41e1-a692-befe4dd90156", "metadata": {}, "source": [ - "_Note: depending on your LangChain setup, you may need to install/upgrade other dependencies needed for this demo_\n", - "_(specifically, recent versions of `datasets`, `openai`, `pypdf` and `tiktoken` are required)._" + "_**Note.** the following are all packages required to run the full demo on this page. Depending on your LangChain setup, some of them may need to be installed:_" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "56c1f86e-5921-4976-ac8f-1d62e5a512b0", + "metadata": {}, + "outputs": [], + "source": [ + "pip install langchain langchain-openai datasets pypdf" + ] + }, + { + "cell_type": "markdown", + "id": "c2910035-e61f-48d9-a110-d68c401b62aa", + "metadata": {}, + "source": [ + "### Import dependencies" ] }, { @@ -89,28 +120,12 @@ "embe = OpenAIEmbeddings()" ] }, - { - "cell_type": "markdown", - "id": "dd8caa76-bc41-429e-a93b-989ba13aff01", - "metadata": {}, - "source": [ - "_Keep reading to connect with Astra DB. For usage with Apache Cassandra and Astra DB through CQL, scroll to the section below._" - ] - }, { "cell_type": "markdown", "id": "22866f09-e10d-4f05-a24b-b9420129462e", "metadata": {}, "source": [ - "## Astra DB" - ] - }, - { - "cell_type": "markdown", - "id": "5fba47cc-3533-42fc-84b7-9dc14cd68b2b", - "metadata": {}, - "source": [ - "DataStax [Astra DB](https://docs.datastax.com/en/astra/home/astra.html) is a serverless vector-capable database built on Cassandra and made conveniently available through an easy-to-use JSON API." 
+ "## Import the Vector Store" ] }, { @@ -120,7 +135,7 @@ "metadata": {}, "outputs": [], "source": [ - "from langchain_community.vectorstores import AstraDB" + "from langchain_astradb import AstraDBVectorStore" ] }, { @@ -128,10 +143,13 @@ "id": "68f61b01-3e09-47c1-9d67-5d6915c86626", "metadata": {}, "source": [ - "### Astra DB connection parameters\n", + "## Connection parameters\n", + "\n", + "These are found on your Astra DB dashboard:\n", "\n", "- the API Endpoint looks like `https://01234567-89ab-cdef-0123-456789abcdef-us-east1.apps.astra.datastax.com`\n", - "- the Token looks like `AstraCS:6gBhNmsk135....`" + "- the Token looks like `AstraCS:6gBhNmsk135....`\n", + "- you may optionally provide a _Namespace_ such as `my_namespace`" ] }, { @@ -142,7 +160,21 @@ "outputs": [], "source": [ "ASTRA_DB_API_ENDPOINT = input(\"ASTRA_DB_API_ENDPOINT = \")\n", - "ASTRA_DB_APPLICATION_TOKEN = getpass(\"ASTRA_DB_APPLICATION_TOKEN = \")" + "ASTRA_DB_APPLICATION_TOKEN = getpass(\"ASTRA_DB_APPLICATION_TOKEN = \")\n", + "\n", + "desired_namespace = input(\"(optional) Namespace = \")\n", + "if desired_namespace:\n", + " ASTRA_DB_KEYSPACE = desired_namespace\n", + "else:\n", + " ASTRA_DB_KEYSPACE = None" + ] + }, + { + "cell_type": "markdown", + "id": "196268bd-a950-41c3-bede-f5b55f6a0804", + "metadata": {}, + "source": [ + "Now you can create the vector store:" ] }, { @@ -152,11 +184,12 @@ "metadata": {}, "outputs": [], "source": [ - "vstore = AstraDB(\n", + "vstore = AstraDBVectorStore(\n", " embedding=embe,\n", " collection_name=\"astra_vector_demo\",\n", " api_endpoint=ASTRA_DB_API_ENDPOINT,\n", " token=ASTRA_DB_APPLICATION_TOKEN,\n", + " namespace=ASTRA_DB_KEYSPACE,\n", ")" ] }, @@ -165,7 +198,7 @@ "id": "9a348678-b2f6-46ca-9a0d-2eb4cc6b66b1", "metadata": {}, "source": [ - "### Load a dataset" + "## Load a dataset" ] }, { @@ -243,7 +276,7 @@ "id": "c031760a-1fc5-4855-adf2-02ed52fe2181", "metadata": {}, "source": [ - "### Run simple searches" + "## Run searches" ] }, { @@ -318,12 +351,22 @@ " print(f\"* {res.page_content} [{res.metadata}]\")" ] }, + { + "cell_type": "markdown", + "id": "60fda5df-14e4-4fb0-bd17-65a393fab8a9", + "metadata": {}, + "source": [ + "### Async\n", + "\n", + "Note that the Astra DB vector store supports all fully async methods (`asimilarity_search`, `afrom_texts`, `adelete` and so on) natively, i.e. without thread wrapping involved." 
+ ] + }, { "cell_type": "markdown", "id": "1cc86edd-692b-4495-906c-ccfd13b03c23", "metadata": {}, "source": [ - "### Deleting stored documents" + "## Deleting stored documents" ] }, { @@ -353,7 +396,7 @@ "id": "847181ba-77d1-4a17-b7f9-9e2c3d8efd13", "metadata": {}, "source": [ - "### A minimal RAG chain" + "## A minimal RAG chain" ] }, { @@ -452,7 +495,7 @@ "id": "177610c7-50d0-4b7b-8634-b03338054c8e", "metadata": {}, "source": [ - "### Cleanup" + "## Cleanup" ] }, { @@ -474,290 +517,6 @@ "source": [ "vstore.delete_collection()" ] - }, - { - "cell_type": "markdown", - "id": "94ebaab1-7cbf-4144-a147-7b0e32c43069", - "metadata": {}, - "source": [ - "## Apache Cassandra and Astra DB through CQL" - ] - }, - { - "cell_type": "markdown", - "id": "bc3931b4-211d-4f84-bcc0-51c127e3027c", - "metadata": {}, - "source": [ - "[Cassandra](https://cassandra.apache.org/) is a NoSQL, row-oriented, highly scalable and highly available database.Starting with version 5.0, the database ships with [vector search capabilities](https://cassandra.apache.org/doc/trunk/cassandra/vector-search/overview.html).\n", - "\n", - "DataStax [Astra DB through CQL](https://docs.datastax.com/en/astra-serverless/docs/vector-search/quickstart.html) is a managed serverless database built on Cassandra, offering the same interface and strengths." - ] - }, - { - "cell_type": "markdown", - "id": "a0055fbf-448d-4e46-9c40-28d43df25ca3", - "metadata": {}, - "source": [ - "#### What sets this case apart from \"Astra DB\" above?\n", - "\n", - "Thanks to LangChain having a standardized `VectorStore` interface, most of the \"Astra DB\" section above applies to this case as well. However, this time the database uses the CQL protocol, which means you'll use a _different_ class this time and instantiate it in another way.\n", - "\n", - "The cells below show how you should get your `vstore` object in this case and how you can clean up the database resources at the end: for the rest, i.e. the actual usage of the vector store, you will be able to run the very code that was shown above.\n", - "\n", - "In other words, running this demo in full with Cassandra or Astra DB through CQL means:\n", - "\n", - "- **initialization as shown below**\n", - "- \"Load a dataset\", _see above section_\n", - "- \"Run simple searches\", _see above section_\n", - "- \"MMR search\", _see above section_\n", - "- \"Deleting stored documents\", _see above section_\n", - "- \"A minimal RAG chain\", _see above section_\n", - "- **cleanup as shown below**" - ] - }, - { - "cell_type": "markdown", - "id": "23d12be2-745f-4e72-a82c-334a887bc7cd", - "metadata": {}, - "source": [ - "### Initialization" - ] - }, - { - "cell_type": "markdown", - "id": "e3212542-79be-423e-8e1f-b8d725e3cda8", - "metadata": {}, - "source": [ - "The class to use is the following:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "941af73e-a090-4fba-b23c-595757d470eb", - "metadata": {}, - "outputs": [], - "source": [ - "from langchain_community.vectorstores import Cassandra" - ] - }, - { - "cell_type": "markdown", - "id": "414d1e72-f7c9-4b6d-bf6f-16075712c7e3", - "metadata": {}, - "source": [ - "Now, depending on whether you connect to a Cassandra cluster or to Astra DB through CQL, you will provide different parameters when creating the vector store object." 
- ] - }, - { - "cell_type": "markdown", - "id": "48ecca56-71a4-4a91-b198-29384c44ce27", - "metadata": {}, - "source": [ - "#### Initialization (Cassandra cluster)" - ] - }, - { - "cell_type": "markdown", - "id": "55ebe958-5654-43e0-9aed-d607ffd3fa48", - "metadata": {}, - "source": [ - "In this case, you first need to create a `cassandra.cluster.Session` object, as described in the [Cassandra driver documentation](https://docs.datastax.com/en/developer/python-driver/latest/api/cassandra/cluster/#module-cassandra.cluster). The details vary (e.g. with network settings and authentication), but this might be something like:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "4642dafb-a065-4063-b58c-3d276f5ad07e", - "metadata": {}, - "outputs": [], - "source": [ - "from cassandra.cluster import Cluster\n", - "\n", - "cluster = Cluster([\"127.0.0.1\"])\n", - "session = cluster.connect()" - ] - }, - { - "cell_type": "markdown", - "id": "624c93bf-fb46-4350-bcfa-09ca09dc068f", - "metadata": {}, - "source": [ - "You can now set the session, along with your desired keyspace name, as a global CassIO parameter:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "92a4ab28-1c4f-4dad-9671-d47e0b1dde7b", - "metadata": {}, - "outputs": [], - "source": [ - "import cassio\n", - "\n", - "CASSANDRA_KEYSPACE = input(\"CASSANDRA_KEYSPACE = \")\n", - "\n", - "cassio.init(session=session, keyspace=CASSANDRA_KEYSPACE)" - ] - }, - { - "cell_type": "markdown", - "id": "3b87a824-36f1-45b4-b54c-efec2a2de216", - "metadata": {}, - "source": [ - "Now you can create the vector store:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "853a2a88-a565-4e24-8789-d78c213954a6", - "metadata": {}, - "outputs": [], - "source": [ - "vstore = Cassandra(\n", - " embedding=embe,\n", - " table_name=\"cassandra_vector_demo\",\n", - " # session=None, keyspace=None # Uncomment on older versions of LangChain\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "768ddf7a-0c3e-4134-ad38-25ac53c3da7a", - "metadata": {}, - "source": [ - "#### Initialization (Astra DB through CQL)" - ] - }, - { - "cell_type": "markdown", - "id": "4ed4269a-b7e7-4503-9e66-5a11335c7681", - "metadata": {}, - "source": [ - "In this case you initialize CassIO with the following connection parameters:\n", - "\n", - "- the Database ID, e.g. `01234567-89ab-cdef-0123-456789abcdef`\n", - "- the Token, e.g. 
`AstraCS:6gBhNmsk135....` (it must be a \"Database Administrator\" token)\n", - "- Optionally a Keyspace name (if omitted, the default one for the database will be used)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5fa6bd74-d4b2-45c5-9757-96dddc6242fb", - "metadata": {}, - "outputs": [], - "source": [ - "ASTRA_DB_ID = input(\"ASTRA_DB_ID = \")\n", - "ASTRA_DB_APPLICATION_TOKEN = getpass(\"ASTRA_DB_APPLICATION_TOKEN = \")\n", - "\n", - "desired_keyspace = input(\"ASTRA_DB_KEYSPACE (optional, can be left empty) = \")\n", - "if desired_keyspace:\n", - " ASTRA_DB_KEYSPACE = desired_keyspace\n", - "else:\n", - " ASTRA_DB_KEYSPACE = None" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "add6e585-17ff-452e-8ef6-7e485ead0b06", - "metadata": {}, - "outputs": [], - "source": [ - "import cassio\n", - "\n", - "cassio.init(\n", - " database_id=ASTRA_DB_ID,\n", - " token=ASTRA_DB_APPLICATION_TOKEN,\n", - " keyspace=ASTRA_DB_KEYSPACE,\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "b305823c-bc98-4f3d-aabb-d7eb663ea421", - "metadata": {}, - "source": [ - "Now you can create the vector store:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "f45f3038-9d59-41cc-8b43-774c6aa80295", - "metadata": {}, - "outputs": [], - "source": [ - "vstore = Cassandra(\n", - " embedding=embe,\n", - " table_name=\"cassandra_vector_demo\",\n", - " # session=None, keyspace=None # Uncomment on older versions of LangChain\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "39284918-cf8a-49bb-a2d3-aef285bb2ffa", - "metadata": {}, - "source": [ - "### Usage of the vector store" - ] - }, - { - "cell_type": "markdown", - "id": "3cc1aead-d6ec-48a3-affe-1d0cffa955a9", - "metadata": {}, - "source": [ - "_See the sections \"Load a dataset\" through \"A minimal RAG chain\" above._\n", - "\n", - "Speaking of the latter, you can check out a full RAG template for Astra DB through CQL [here](https://github.com/langchain-ai/langchain/tree/master/templates/cassandra-entomology-rag)." - ] - }, - { - "cell_type": "markdown", - "id": "096397d8-6622-4685-9f9d-7e238beca467", - "metadata": {}, - "source": [ - "### Cleanup" - ] - }, - { - "cell_type": "markdown", - "id": "cc1e74f9-5500-41aa-836f-235b1ed5f20c", - "metadata": {}, - "source": [ - "the following essentially retrieves the `Session` object from CassIO and runs a CQL `DROP TABLE` statement with it:" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "b5b82c33-0e77-4a37-852c-8d50edbdd991", - "metadata": {}, - "outputs": [], - "source": [ - "cassio.config.resolve_session().execute(\n", - " f\"DROP TABLE {cassio.config.resolve_keyspace()}.cassandra_vector_demo;\"\n", - ")" - ] - }, - { - "cell_type": "markdown", - "id": "c10ece4d-ae06-42ab-baf4-4d0ac2051743", - "metadata": {}, - "source": [ - "### Learn more" - ] - }, - { - "cell_type": "markdown", - "id": "51ea8b69-7e15-458f-85aa-9fa199f95f9c", - "metadata": {}, - "source": [ - "For more information, extended quickstarts and additional usage examples, please visit the [CassIO documentation](https://cassio.org/frameworks/langchain/about/) for more on using the LangChain `Cassandra` vector store." 
- ] } ], "metadata": { @@ -776,7 +535,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.9.18" } }, "nbformat": 4, diff --git a/docs/docs/integrations/vectorstores/cassandra.ipynb b/docs/docs/integrations/vectorstores/cassandra.ipynb new file mode 100644 index 0000000000000..524f76a1052c9 --- /dev/null +++ b/docs/docs/integrations/vectorstores/cassandra.ipynb @@ -0,0 +1,651 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "d2d6ca14-fb7e-4172-9aa0-a3119a064b96", + "metadata": {}, + "source": [ + "# Apache Cassandra\n", + "\n", + "This page provides a quickstart for using [Apache Cassandra®](https://cassandra.apache.org/) as a Vector Store." + ] + }, + { + "cell_type": "markdown", + "id": "6a1a562e-3d1a-4693-b55d-08bf90943a9a", + "metadata": {}, + "source": [ + "> [Cassandra](https://cassandra.apache.org/) is a NoSQL, row-oriented, highly scalable and highly available database. Starting with version 5.0, the database ships with [vector search capabilities](https://cassandra.apache.org/doc/trunk/cassandra/vector-search/overview.html)." + ] + }, + { + "cell_type": "markdown", + "id": "9cf37d7f-c18e-4e63-adea-138e5e981475", + "metadata": {}, + "source": [ + "_Note: in addition to access to the database, an OpenAI API Key is required to run the full example._" + ] + }, + { + "cell_type": "markdown", + "id": "bb9be7ce-8c70-4d46-9f11-71c42a36e928", + "metadata": {}, + "source": [ + "### Setup and general dependencies" + ] + }, + { + "cell_type": "markdown", + "id": "dbe7c156-0413-47e3-9237-4769c4248869", + "metadata": {}, + "source": [ + "Use of the integration requires the following Python package." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8d00fcf4-9798-4289-9214-d9734690adfc", + "metadata": {}, + "outputs": [], + "source": [ + "%pip install --upgrade --quiet \"cassio>=0.1.4\"" + ] + }, + { + "cell_type": "markdown", + "id": "2453d83a-bc8f-41e1-a692-befe4dd90156", + "metadata": {}, + "source": [ + "_Note: depending on your LangChain setup, you may need to install/upgrade other dependencies needed for this demo_\n", + "_(specifically, recent versions of `datasets`, `openai`, `pypdf` and `tiktoken` are required, along with `langchain-community`)._" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b06619af-fea2-4863-8149-7f239a8c9c82", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "from getpass import getpass\n", + "\n", + "from datasets import (\n", + " load_dataset,\n", + ")\n", + "from langchain.schema import Document\n", + "from langchain.text_splitter import RecursiveCharacterTextSplitter\n", + "from langchain_community.document_loaders import PyPDFLoader\n", + "from langchain_core.output_parsers import StrOutputParser\n", + "from langchain_core.prompts import ChatPromptTemplate\n", + "from langchain_core.runnables import RunnablePassthrough\n", + "from langchain_openai import ChatOpenAI, OpenAIEmbeddings" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1983f1da-0ae7-4a9b-bf4c-4ade328f7a3a", + "metadata": {}, + "outputs": [], + "source": [ + "os.environ[\"OPENAI_API_KEY\"] = getpass(\"OPENAI_API_KEY = \")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c656df06-e938-4bc5-b570-440b8b7a0189", + "metadata": {}, + "outputs": [], + "source": [ + "embe = OpenAIEmbeddings()" + ] + }, + { + "cell_type": "markdown", + "id": "22866f09-e10d-4f05-a24b-b9420129462e", + "metadata": {}, + "source": [ + "## 
Import the Vector Store" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0b32730d-176e-414c-9d91-fd3644c54211", + "metadata": {}, + "outputs": [], + "source": [ + "from langchain_community.vectorstores import Cassandra" + ] + }, + { + "cell_type": "markdown", + "id": "68f61b01-3e09-47c1-9d67-5d6915c86626", + "metadata": {}, + "source": [ + "## Connection parameters\n", + "\n", + "The Vector Store integration shown in this page can be used with Cassandra as well as other derived databases, such as Astra DB, which use the CQL (Cassandra Query Language) protocol.\n", + "\n", + "> DataStax [Astra DB](https://docs.datastax.com/en/astra-serverless/docs/vector-search/quickstart.html) is a managed serverless database built on Cassandra, offering the same interface and strengths.\n", + "\n", + "Depending on whether you connect to a Cassandra cluster or to Astra DB through CQL, you will provide different parameters when creating the vector store object." + ] + }, + { + "cell_type": "markdown", + "id": "36bbb3d9-4d07-4f63-b23d-c52be03f8938", + "metadata": {}, + "source": [ + "### Connecting to a Cassandra cluster\n", + "\n", + "You first need to create a `cassandra.cluster.Session` object, as described in the [Cassandra driver documentation](https://docs.datastax.com/en/developer/python-driver/latest/api/cassandra/cluster/#module-cassandra.cluster). The details vary (e.g. with network settings and authentication), but this might be something like:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d95bb1d4-d8a6-4e66-89bc-776f9c6f962b", + "metadata": {}, + "outputs": [], + "source": [ + "from cassandra.cluster import Cluster\n", + "\n", + "cluster = Cluster([\"127.0.0.1\"])\n", + "session = cluster.connect()" + ] + }, + { + "cell_type": "markdown", + "id": "8279aa78-96d6-43ad-aa21-79fd798d895d", + "metadata": {}, + "source": [ + "You can now set the session, along with your desired keyspace name, as a global CassIO parameter:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "29ececc4-e50b-4428-967f-4b6bbde12a14", + "metadata": {}, + "outputs": [], + "source": [ + "import cassio\n", + "\n", + "CASSANDRA_KEYSPACE = input(\"CASSANDRA_KEYSPACE = \")\n", + "\n", + "cassio.init(session=session, keyspace=CASSANDRA_KEYSPACE)" + ] + }, + { + "cell_type": "markdown", + "id": "0bd035a2-f0af-418f-94e5-0fbb4d51ac3c", + "metadata": {}, + "source": [ + "Now you can create the vector store:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eeb62cde-89fc-44d7-ba76-91e19cbc5898", + "metadata": {}, + "outputs": [], + "source": [ + "vstore = Cassandra(\n", + " embedding=embe,\n", + " table_name=\"cassandra_vector_demo\",\n", + " # session=None, keyspace=None # Uncomment on older versions of LangChain\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "ce240555-e5fc-431d-ac0f-bcf2f6e6a5fb", + "metadata": {}, + "source": [ + "_Note: you can also pass your session and keyspace directly as parameters when creating the vector store. 
Using the global `cassio.init` setting, however, comes in handy if your application uses Cassandra in several ways (for instance, for vector store, chat memory and LLM response caching), as it allows you to centralize credential and DB connection management in one place._" + ] + }, + { + "cell_type": "markdown", + "id": "b598e5fa-eb62-4939-9734-091628e84db4", + "metadata": {}, + "source": [ + "### Connecting to Astra DB through CQL" + ] + }, + { + "cell_type": "markdown", + "id": "2feec7c3-7092-4252-9a3f-05eda4babe74", + "metadata": {}, + "source": [ + "In this case you initialize CassIO with the following connection parameters:\n", + "\n", + "- the Database ID, e.g. `01234567-89ab-cdef-0123-456789abcdef`\n", + "- the Token, e.g. `AstraCS:6gBhNmsk135....` (it must be a \"Database Administrator\" token)\n", + "- Optionally a Keyspace name (if omitted, the default one for the database will be used)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2f96147d-6d76-4101-bbb0-4a7f215c3d2d", + "metadata": {}, + "outputs": [], + "source": [ + "ASTRA_DB_ID = input(\"ASTRA_DB_ID = \")\n", + "ASTRA_DB_APPLICATION_TOKEN = getpass(\"ASTRA_DB_APPLICATION_TOKEN = \")\n", + "\n", + "desired_keyspace = input(\"ASTRA_DB_KEYSPACE (optional, can be left empty) = \")\n", + "if desired_keyspace:\n", + " ASTRA_DB_KEYSPACE = desired_keyspace\n", + "else:\n", + " ASTRA_DB_KEYSPACE = None" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d653df1d-9dad-4980-ba52-76a47b4c5c1a", + "metadata": {}, + "outputs": [], + "source": [ + "import cassio\n", + "\n", + "cassio.init(\n", + " database_id=ASTRA_DB_ID,\n", + " token=ASTRA_DB_APPLICATION_TOKEN,\n", + " keyspace=ASTRA_DB_KEYSPACE,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "e606b58b-d390-4fed-a2fc-65036c44860f", + "metadata": {}, + "source": [ + "Now you can create the vector store:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9cb552d1-e888-4550-a350-6df06b1f5aae", + "metadata": {}, + "outputs": [], + "source": [ + "vstore = Cassandra(\n", + " embedding=embe,\n", + " table_name=\"cassandra_vector_demo\",\n", + " # session=None, keyspace=None # Uncomment on older versions of LangChain\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "9a348678-b2f6-46ca-9a0d-2eb4cc6b66b1", + "metadata": {}, + "source": [ + "## Load a dataset" + ] + }, + { + "cell_type": "markdown", + "id": "552e56b0-301a-4b06-99c7-57ba6faa966f", + "metadata": {}, + "source": [ + "Convert each entry in the source dataset into a `Document`, then write them into the vector store:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3a1f532f-ad63-4256-9730-a183841bd8e9", + "metadata": {}, + "outputs": [], + "source": [ + "philo_dataset = load_dataset(\"datastax/philosopher-quotes\")[\"train\"]\n", + "\n", + "docs = []\n", + "for entry in philo_dataset:\n", + " metadata = {\"author\": entry[\"author\"]}\n", + " doc = Document(page_content=entry[\"quote\"], metadata=metadata)\n", + " docs.append(doc)\n", + "\n", + "inserted_ids = vstore.add_documents(docs)\n", + "print(f\"\\nInserted {len(inserted_ids)} documents.\")" + ] + }, + { + "cell_type": "markdown", + "id": "79d4f436-ef04-4288-8f79-97c9abb983ed", + "metadata": {}, + "source": [ + "In the above, `metadata` dictionaries are created from the source data and are part of the `Document`.",
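+ "\n", + "For instance (a minimal sketch reusing the `vstore` and `docs` from the cell above; the query text is arbitrary), these metadata fields are what the `filter` argument of the search methods demonstrated further below matches against:\n", + "\n", + "```python\n", + "# restrict the search to documents whose metadata has author == \"plato\"\n", + "hits = vstore.similarity_search(\"a quote about life\", k=2, filter={\"author\": \"plato\"})\n", + "```"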
+ ] + }, + { + "cell_type": "markdown", + "id": "084d8802-ab39-4262-9a87-42eafb746f92", + "metadata": {}, + "source": [ + "Add some more entries, this time with `add_texts`:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b6b157f5-eb31-4907-a78e-2e2b06893936", + "metadata": {}, + "outputs": [], + "source": [ + "texts = [\"I think, therefore I am.\", \"To the things themselves!\"]\n", + "metadatas = [{\"author\": \"descartes\"}, {\"author\": \"husserl\"}]\n", + "ids = [\"desc_01\", \"huss_xy\"]\n", + "\n", + "inserted_ids_2 = vstore.add_texts(texts=texts, metadatas=metadatas, ids=ids)\n", + "print(f\"\\nInserted {len(inserted_ids_2)} documents.\")" + ] + }, + { + "cell_type": "markdown", + "id": "63840eb3-8b29-4017-bc2f-301bf5001f28", + "metadata": {}, + "source": [ + "_Note: you may want to speed up the execution of `add_texts` and `add_documents` by increasing the concurrency level for_\n", + "_these bulk operations - check out the methods' `batch_size` parameter_\n", + "_for more details. Depending on the network and the client machine specifications, your best-performing choice of parameters may vary._" + ] + }, + { + "cell_type": "markdown", + "id": "c031760a-1fc5-4855-adf2-02ed52fe2181", + "metadata": {}, + "source": [ + "## Run searches" + ] + }, + { + "cell_type": "markdown", + "id": "02a77d8e-1aae-4054-8805-01c77947c49f", + "metadata": {}, + "source": [ + "This section demonstrates metadata filtering and getting the similarity scores back:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1761806a-1afd-4491-867c-25a80d92b9fe", + "metadata": {}, + "outputs": [], + "source": [ + "results = vstore.similarity_search(\"Our life is what we make of it\", k=3)\n", + "for res in results:\n", + " print(f\"* {res.page_content} [{res.metadata}]\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eebc4f7c-f61a-438e-b3c8-17e6888d8a0b", + "metadata": {}, + "outputs": [], + "source": [ + "results_filtered = vstore.similarity_search(\n", + " \"Our life is what we make of it\",\n", + " k=3,\n", + " filter={\"author\": \"plato\"},\n", + ")\n", + "for res in results_filtered:\n", + " print(f\"* {res.page_content} [{res.metadata}]\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "11bbfe64-c0cd-40c6-866a-a5786538450e", + "metadata": {}, + "outputs": [], + "source": [ + "results = vstore.similarity_search_with_score(\"Our life is what we make of it\", k=3)\n", + "for res, score in results:\n", + " print(f\"* [SIM={score:3f}] {res.page_content} [{res.metadata}]\")" + ] + }, + { + "cell_type": "markdown", + "id": "b14ea558-bfbe-41ce-807e-d70670060ada", + "metadata": {}, + "source": [ + "### MMR (Maximal-marginal-relevance) search" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "76381ce8-780a-4e3b-97b1-056d6782d7d5", + "metadata": {}, + "outputs": [], + "source": [ + "results = vstore.max_marginal_relevance_search(\n", + " \"Our life is what we make of it\",\n", + " k=3,\n", + " filter={\"author\": \"aristotle\"},\n", + ")\n", + "for res in results:\n", + " print(f\"* {res.page_content} [{res.metadata}]\")" + ] + }, + { + "cell_type": "markdown", + "id": "1cc86edd-692b-4495-906c-ccfd13b03c23", + "metadata": {}, + "source": [ + "## Deleting stored documents" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "38a70ec4-b522-4d32-9ead-c642864fca37", + "metadata": {}, + "outputs": [], + "source": [ + "delete_1 = vstore.delete(inserted_ids[:3])\n", + 
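+ "# the returned flag reports whether the deletion request succeeded\n",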
"print(f\"all_succeed={delete_1}\") # True, all documents deleted" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d4cf49ed-9d29-4ed9-bdab-51a308c41b8e", + "metadata": {}, + "outputs": [], + "source": [ + "delete_2 = vstore.delete(inserted_ids[2:5])\n", + "print(f\"some_succeeds={delete_2}\") # True, though some IDs were gone already" + ] + }, + { + "cell_type": "markdown", + "id": "847181ba-77d1-4a17-b7f9-9e2c3d8efd13", + "metadata": {}, + "source": [ + "## A minimal RAG chain" + ] + }, + { + "cell_type": "markdown", + "id": "cd64b844-846f-43c5-a7dd-c26b9ed417d0", + "metadata": {}, + "source": [ + "The next cells will implement a simple RAG pipeline:\n", + "- download a sample PDF file and load it onto the store;\n", + "- create a RAG chain with LCEL (LangChain Expression Language), with the vector store at its heart;\n", + "- run the question-answering chain." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5cbc4dba-0d5e-4038-8fc5-de6cadd1c2a9", + "metadata": {}, + "outputs": [], + "source": [ + "!curl -L \\\n", + " \"https://github.com/awesome-astra/datasets/blob/main/demo-resources/what-is-philosophy/what-is-philosophy.pdf?raw=true\" \\\n", + " -o \"what-is-philosophy.pdf\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "459385be-5e9c-47ff-ba53-2b7ae6166b09", + "metadata": {}, + "outputs": [], + "source": [ + "pdf_loader = PyPDFLoader(\"what-is-philosophy.pdf\")\n", + "splitter = RecursiveCharacterTextSplitter(chunk_size=512, chunk_overlap=64)\n", + "docs_from_pdf = pdf_loader.load_and_split(text_splitter=splitter)\n", + "\n", + "print(f\"Documents from PDF: {len(docs_from_pdf)}.\")\n", + "inserted_ids_from_pdf = vstore.add_documents(docs_from_pdf)\n", + "print(f\"Inserted {len(inserted_ids_from_pdf)} documents.\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5010a66c-4298-4e32-82b5-2da0d36a5c70", + "metadata": {}, + "outputs": [], + "source": [ + "retriever = vstore.as_retriever(search_kwargs={\"k\": 3})\n", + "\n", + "philo_template = \"\"\"\n", + "You are a philosopher that draws inspiration from great thinkers of the past\n", + "to craft well-thought answers to user questions. Use the provided context as the basis\n", + "for your answers and do not make up new reasoning paths - just mix-and-match what you are given.\n", + "Your answers must be concise and to the point, and refrain from answering about other topics than philosophy.\n", + "\n", + "CONTEXT:\n", + "{context}\n", + "\n", + "QUESTION: {question}\n", + "\n", + "YOUR ANSWER:\"\"\"\n", + "\n", + "philo_prompt = ChatPromptTemplate.from_template(philo_template)\n", + "\n", + "llm = ChatOpenAI()\n", + "\n", + "chain = (\n", + " {\"context\": retriever, \"question\": RunnablePassthrough()}\n", + " | philo_prompt\n", + " | llm\n", + " | StrOutputParser()\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fcbc1296-6c7c-478b-b55b-533ba4e54ddb", + "metadata": {}, + "outputs": [], + "source": [ + "chain.invoke(\"How does Russel elaborate on Peirce's idea of the security blanket?\")" + ] + }, + { + "cell_type": "markdown", + "id": "869ab448-a029-4692-aefc-26b85513314d", + "metadata": {}, + "source": [ + "For more, check out a complete RAG template using Astra DB through CQL [here](https://github.com/langchain-ai/langchain/tree/master/templates/cassandra-entomology-rag)." 
+ ] + }, + { + "cell_type": "markdown", + "id": "177610c7-50d0-4b7b-8634-b03338054c8e", + "metadata": {}, + "source": [ + "## Cleanup" + ] + }, + { + "cell_type": "markdown", + "id": "0da4d19f-9878-4d3d-82c9-09cafca20322", + "metadata": {}, + "source": [ + "The following essentially retrieves the `Session` object from CassIO and runs a CQL `DROP TABLE` statement with it:\n", + "\n", + "_(You will lose the data you stored in the table.)_" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fd405a13-6f71-46fa-87e6-167238e9c25e", + "metadata": {}, + "outputs": [], + "source": [ + "cassio.config.resolve_session().execute(\n", + " f\"DROP TABLE {cassio.config.resolve_keyspace()}.cassandra_vector_demo;\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "c10ece4d-ae06-42ab-baf4-4d0ac2051743", + "metadata": {}, + "source": [ + "### Learn more" + ] + }, + { + "cell_type": "markdown", + "id": "51ea8b69-7e15-458f-85aa-9fa199f95f9c", + "metadata": {}, + "source": [ + "For extended quickstarts and additional usage examples, please visit the [CassIO documentation](https://cassio.org/frameworks/langchain/about/) on using the LangChain `Cassandra` vector store." + ] + }, + { + "cell_type": "markdown", + "id": "3b8ee30c-2c84-42f3-9cff-e80dbc590490", + "metadata": {}, + "source": [ + "#### Attribution statement\n", + "\n", + "> Apache Cassandra, Cassandra and Apache are either registered trademarks or trademarks of the [Apache Software Foundation](http://www.apache.org/) in the United States and/or other countries.\n" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.17" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/docs/docs/integrations/vectorstores/elasticsearch.ipynb b/docs/docs/integrations/vectorstores/elasticsearch.ipynb index 9032e698c2a1c..c36c6f65ee632 100644 --- a/docs/docs/integrations/vectorstores/elasticsearch.ipynb +++ b/docs/docs/integrations/vectorstores/elasticsearch.ipynb @@ -92,6 +92,28 @@ " )\n", "```\n", "\n", + "You can also use an `Elasticsearch` client object that gives you more flexibility, for example, to configure the maximum number of retries.\n", + "\n", + "Example:\n", + "```python\n", + " import elasticsearch\n", + " from langchain_community.vectorstores import ElasticsearchStore\n", + "\n", + " es_client = elasticsearch.Elasticsearch(\n", + " hosts=[\"http://localhost:9200\"],\n", + " basic_auth=(\"elastic\", \"changeme\"),\n", + " max_retries=10,\n", + " )\n", + "\n", + " embedding = OpenAIEmbeddings()\n", + " elastic_vector_search = ElasticsearchStore(\n", + " index_name=\"test_index\",\n", + " es_connection=es_client,\n", + " embedding=embedding,\n", + " )\n", + "```\n", + "\n", "#### How to obtain a password for the default \"elastic\" user?\n", "\n", "To obtain your Elastic Cloud password for the default \"elastic\" user:\n", @@ -134,7 +156,7 @@ "id": "ea167a29", "metadata": {}, "source": [ - "We want to use `OpenAIEmbeddings` so we have to get the OpenAI API Key." + "To use `OpenAIEmbeddings`, we have to configure the OpenAI API Key in the environment."
] }, { diff --git a/docs/docs/integrations/vectorstores/lancedb.ipynb b/docs/docs/integrations/vectorstores/lancedb.ipynb index ab5c56eb8f3cd..18eb519eecd3b 100644 --- a/docs/docs/integrations/vectorstores/lancedb.ipynb +++ b/docs/docs/integrations/vectorstores/lancedb.ipynb @@ -14,14 +14,50 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "id": "bfcf346a", "metadata": { "tags": [] }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: lancedb in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (0.4.4)\n", + "Requirement already satisfied: deprecation in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from lancedb) (2.1.0)\n", + "Requirement already satisfied: pylance==0.9.6 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from lancedb) (0.9.6)\n", + "Requirement already satisfied: ratelimiter~=1.0 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from lancedb) (1.2.0.post0)\n", + "Requirement already satisfied: retry>=0.9.2 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from lancedb) (0.9.2)\n", + "Requirement already satisfied: tqdm>=4.27.0 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from lancedb) (4.66.1)\n", + "Requirement already satisfied: pydantic>=1.10 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from lancedb) (2.4.2)\n", + "Requirement already satisfied: attrs>=21.3.0 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from lancedb) (23.1.0)\n", + "Requirement already satisfied: semver>=3.0 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from lancedb) (3.0.2)\n", + "Requirement already satisfied: cachetools in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from lancedb) (5.3.2)\n", + "Requirement already satisfied: pyyaml>=6.0 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from lancedb) (6.0.1)\n", + "Requirement already satisfied: click>=8.1.7 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from lancedb) (8.1.7)\n", + "Requirement already satisfied: requests>=2.31.0 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from lancedb) (2.31.0)\n", + "Requirement already satisfied: overrides>=0.7 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from lancedb) (7.4.0)\n", + "Requirement already satisfied: pyarrow>=12 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from pylance==0.9.6->lancedb) (14.0.2)\n", + "Requirement already satisfied: numpy>=1.22 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from pylance==0.9.6->lancedb) (1.24.4)\n", + "Requirement already satisfied: annotated-types>=0.4.0 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from pydantic>=1.10->lancedb) (0.5.0)\n", + "Requirement already satisfied: pydantic-core==2.10.1 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from pydantic>=1.10->lancedb) (2.10.1)\n", + "Requirement already satisfied: typing-extensions>=4.6.1 in 
/Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from pydantic>=1.10->lancedb) (4.8.0)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from requests>=2.31.0->lancedb) (3.3.0)\n", + "Requirement already satisfied: idna<4,>=2.5 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from requests>=2.31.0->lancedb) (3.4)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from requests>=2.31.0->lancedb) (2.0.6)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from requests>=2.31.0->lancedb) (2023.7.22)\n", + "Requirement already satisfied: decorator>=3.4.2 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from retry>=0.9.2->lancedb) (5.1.1)\n", + "Requirement already satisfied: py<2.0.0,>=1.4.26 in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from retry>=0.9.2->lancedb) (1.11.0)\n", + "Requirement already satisfied: packaging in /Users/raghavdixit/Desktop/langchain_lance/.dev_env/lib/python3.11/site-packages (from deprecation->lancedb) (23.2)\n", + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m23.2.1\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m23.3.2\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n" + ] + } + ], "source": [ - "%pip install --upgrade --quiet lancedb" + "! 
pip install lancedb" ] }, { @@ -34,20 +70,12 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 4, "id": "a0361f5c-e6f4-45f4-b829-11680cf03cec", "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "OpenAI API Key: ยทยทยทยทยทยทยทยท\n" - ] - } - ], + "outputs": [], "source": [ "import getpass\n", "import os\n", @@ -57,15 +85,15 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "id": "aac9563e", "metadata": { "tags": [] }, "outputs": [], "source": [ - "from langchain_community.vectorstores import LanceDB\n", - "from langchain_openai import OpenAIEmbeddings" + "from langchain.embeddings import OpenAIEmbeddings\n", + "from langchain.vectorstores import LanceDB" ] }, { @@ -75,14 +103,13 @@ "metadata": {}, "outputs": [], "source": [ + "from langchain.document_loaders import TextLoader\n", "from langchain.text_splitter import CharacterTextSplitter\n", - "from langchain_community.document_loaders import TextLoader\n", "\n", "loader = TextLoader(\"../../modules/state_of_the_union.txt\")\n", "documents = loader.load()\n", "\n", "documents = CharacterTextSplitter().split_documents(documents)\n", - "\n", "embeddings = OpenAIEmbeddings()" ] }, @@ -93,22 +120,7 @@ "metadata": {}, "outputs": [], "source": [ - "import lancedb\n", - "\n", - "db = lancedb.connect(\"/tmp/lancedb\")\n", - "table = db.create_table(\n", - " \"my_table\",\n", - " data=[\n", - " {\n", - " \"vector\": embeddings.embed_query(\"Hello World\"),\n", - " \"text\": \"Hello World\",\n", - " \"id\": \"1\",\n", - " }\n", - " ],\n", - " mode=\"overwrite\",\n", - ")\n", - "\n", - "docsearch = LanceDB.from_documents(documents, embeddings, connection=table)\n", + "docsearch = LanceDB.from_documents(documents, embeddings)\n", "\n", "query = \"What did the president say about Ketanji Brown Jackson\"\n", "docs = docsearch.similarity_search(query)" @@ -116,7 +128,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 13, "id": "9c608226", "metadata": {}, "outputs": [ @@ -136,7 +148,7 @@ "\n", "Iโ€™ve worked on these issues a long time. \n", "\n", - "I know what works: Investing in crime preventionand community police officers whoโ€™ll walk the beat, whoโ€™ll know the neighborhood, and who can restore trust and safety. \n", + "I know what works: Investing in crime prevention and community police officers whoโ€™ll walk the beat, whoโ€™ll know the neighborhood, and who can restore trust and safety. \n", "\n", "So letโ€™s not abandon our streets. Or choose between safety and equal justice. \n", "\n", @@ -192,11 +204,97 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "id": "a359ed74", "metadata": {}, - "outputs": [], - "source": [] + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "They were responding to a 9-1-1 call when a man shot and killed them with a stolen gun. \n", + "\n", + "Officer Mora was 27 years old. \n", + "\n", + "Officer Rivera was 22. \n", + "\n", + "Both Dominican Americans whoโ€™d grown up on the same streets they later chose to patrol as police officers. \n", + "\n", + "I spoke with their families and told them that we are forever in debt for their sacrifice, and we will carry on their mission to restore the trust and safety every community deserves. \n", + "\n", + "Iโ€™ve worked on these issues a long time. 
\n", + "\n", + "I know what works: Investing in crime prevention and community police officers whoโ€™ll walk the beat, whoโ€™ll know the neighborhood, and who can restore trust and safety. \n", + "\n", + "So letโ€™s not abandon our streets. Or choose between safety and equal justice. \n", + "\n", + "Letโ€™s come together to protect our communities, restore trust, and hold law enforcement accountable. \n", + "\n", + "Thatโ€™s why the Justice Department required body cameras, banned chokeholds, and restricted no-knock warrants for its officers. \n", + "\n", + "Thatโ€™s why the American Rescue Plan provided $350 Billion that cities, states, and counties can use to hire more police and invest in proven strategies like community violence interruptionโ€”trusted messengers breaking the cycle of violence and trauma and giving young people hope. \n", + "\n", + "We should all agree: The answer is not to Defund the police. The answer is to FUND the police with the resources and training they need to protect our communities. \n", + "\n", + "I ask Democrats and Republicans alike: Pass my budget and keep our neighborhoods safe. \n", + "\n", + "And I will keep doing everything in my power to crack down on gun trafficking and ghost guns you can buy online and make at homeโ€”they have no serial numbers and canโ€™t be traced. \n", + "\n", + "And I ask Congress to pass proven measures to reduce gun violence. Pass universal background checks. Why should anyone on a terrorist list be able to purchase a weapon? \n", + "\n", + "Ban assault weapons and high-capacity magazines. \n", + "\n", + "Repeal the liability shield that makes gun manufacturers the only industry in America that canโ€™t be sued. \n", + "\n", + "These laws donโ€™t infringe on the Second Amendment. They save lives. \n", + "\n", + "The most fundamental right in America is the right to vote โ€“ and to have it counted. And itโ€™s under assault. \n", + "\n", + "In state after state, new laws have been passed, not only to suppress the vote, but to subvert entire elections. \n", + "\n", + "We cannot let this happen. \n", + "\n", + "Tonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while youโ€™re at it, pass the Disclose Act so Americans can know who is funding our elections. \n", + "\n", + "Tonight, Iโ€™d like to honor someone who has dedicated his life to serve this country: Justice Stephen Breyerโ€”an Army veteran, Constitutional scholar, and retiring Justice of the United States Supreme Court. Justice Breyer, thank you for your service. \n", + "\n", + "One of the most serious constitutional responsibilities a President has is nominating someone to serve on the United States Supreme Court. \n", + "\n", + "And I did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji Brown Jackson. One of our nationโ€™s top legal minds, who will continue Justice Breyerโ€™s legacy of excellence. \n", + "\n", + "A former top litigator in private practice. A former federal public defender. And from a family of public school educators and police officers. A consensus builder. Since sheโ€™s been nominated, sheโ€™s received a broad range of supportโ€”from the Fraternal Order of Police to former judges appointed by Democrats and Republicans. \n", + "\n", + "And if we are to advance liberty and justice, we need to secure the Border and fix the immigration system. \n", + "\n", + "We can do both. 
At our border, weโ€™ve installed new technology like cutting-edge scanners to better detect drug smuggling. \n", + "\n", + "Weโ€™ve set up joint patrols with Mexico and Guatemala to catch more human traffickers. \n", + "\n", + "Weโ€™re putting in place dedicated immigration judges so families fleeing persecution and violence can have their cases heard faster.\n" + ] + } + ], + "source": [ + "print(docs[0].page_content)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "12ca9ea8-3d09-49fb-922e-47c64ba90f28", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "{'vector': [-0.005863776430487633, -0.0019847142975777388, -0.004525014664977789, -0.002664136001840234, -0.0007940530776977539, 0.01969318464398384, 0.01712276227772236, 0.008474362082779408, -0.01931833289563656, -0.016988886520266533, 0.01086405199021101, 0.010763644240796566, -0.0004455566522665322, -0.007537228986620903, -0.003405475290492177, -0.0009003172744996846, 0.03338871896266937, -0.009672553278505802, 0.007657717447727919, -0.03087184764444828, -0.014016835950314999, 0.003234783187508583, 0.014552340842783451, 0.0068009099923074245, 0.0008007469004951417, 0.010261609219014645, 0.03170187771320343, -0.010013937950134277, 0.011004622094333172, -0.018608788028359413, -0.01729680225253105, 0.0061917733401060104, -0.036789171397686005, -0.018448136746883392, -0.02779269404709339, -0.0061415694653987885, 0.0002734002482611686, -0.011084947735071182, 0.018943479284644127, -0.014217650517821312, 0.036173343658447266, -0.02574438974261284, 0.002319404622539878, -0.01838119886815548, -0.019104130566120148, 0.017952794209122658, -0.00919059943407774, -0.020764194428920746, -0.026052303612232208, 0.025610512122511864, 0.044580765068531036, 0.0020282240584492683, -0.029211781919002533, -0.024994682520627975, 0.011586982756853104, -0.013735695742070675, -0.013327373191714287, 0.009378026239573956, -0.01097115222364664, -0.011607064865529537, 0.013882959261536598, 0.0014149037888273597, -0.02219666913151741, 0.01697549782693386, -0.009411495178937912, -0.01838119886815548, 0.0012860479764640331, 0.02172810398042202, -0.003882409306243062, 0.015797387808561325, 0.054246626794338226, 0.0028314811643213034, 0.026186181232333183, -0.0068678478710353374, 0.031621553003787994, -0.019719960168004036, -0.005365087650716305, -0.004725828766822815, -0.0011948448373004794, -0.017725205048918724, 0.022451035678386688, -0.01289896946400404, -0.02246442250907421, 0.015917876735329628, 0.013206885196268559, -0.014579115435481071, -0.002242425922304392, -0.0010567849967628717, 0.002655768534168601, 0.0006116467993706465, 0.013006070628762245, 0.024378851056098938, -0.003266578773036599, 0.006626870948821306, -0.009639084339141846, 0.015261884778738022, -0.02694927528500557, 0.02162100188434124, 0.008112896233797073, -0.026386994868516922, 0.016881786286830902, -0.02089807018637657, -0.026453932747244835, -0.011473188176751137, -0.028970805928111076, -0.02961341105401516, -0.006188426166772842, 0.002182181691750884, 0.004344281740486622, 0.011011315509676933, -0.006827685050666332, 0.009029948152601719, 0.0015763919800519943, 0.0075706979259848595, -0.011533432640135288, -0.02203601785004139, -0.018314260989427567, -0.025583738461136818, 0.022330546751618385, -0.03890441730618477, 0.019037192687392235, 0.014445239678025246, 0.0022390789818018675, -0.027953345328569412, 0.01969318464398384, -0.019974324852228165, -0.014164099469780922, 0.008199915289878845, 0.0008442566613666713, 
0.003725104732438922, -0.011553513817489147, -0.011473188176751137, 0.023334616795182228, -0.008400729857385159, 0.011406250298023224, 0.007885306142270565, -0.02093823440372944, 0.01755116693675518, -0.01376247126609087, -0.01838119886815548, 0.01917106844484806, -0.01279856264591217, -0.02579793892800808, -0.01538237277418375, 0.01271823700517416, 0.021272923797369003, 0.0005706471856683493, 0.005903939250856638, 0.014552340842783451, 0.015810776501893997, 0.014766542240977287, -0.01603836566209793, -0.0003526800428517163, -0.007845143787562847, 0.004970152862370014, -0.002126957755535841, -0.024539504200220108, 0.0015303720720112324, 0.008969703689217567, 0.0027461349964141846, 0.006509729195386171, -0.01994754932820797, -0.009331169538199902, 0.03649464622139931, 0.02314719185233116, 0.016426606103777885, -0.014498789794743061, 0.02684217318892479, -0.0007497065817005932, 0.02554357424378395, 0.01915767975151539, 0.017899245023727417, -0.015288659371435642, 0.02773914486169815, 0.00015939632430672646, 0.007778205908834934, 0.018407974392175674, -0.008748807944357395, -0.02694927528500557, 0.01713615097105503, 0.01801973208785057, 0.0008266853983514011, 0.012222895398736, 0.04380428418517113, -0.023120416328310966, -0.009337862953543663, 0.017939407378435135, 0.0074836784042418, -0.023334616795182228, -0.007443515583872795, -0.0010659890249371529, 0.020871296525001526, 0.011138497851788998, -0.012832031585276127, -0.6456044912338257, -0.014552340842783451, 0.017484229058027267, -0.012115794233977795, -0.0034573522862046957, 0.010121039114892483, -0.0011714164866134524, 0.01785908080637455, -0.016426606103777885, 0.01538237277418375, -0.013534881174564362, 0.012805256061255932, 0.0006769114406779408, -0.022852662950754166, -0.026092467829585075, -0.027926571667194366, -0.013039539568126202, -0.00830701645463705, 0.031139599159359932, -0.006164997816085815, -0.02611924149096012, 0.004387791734188795, -0.006108100526034832, 0.0072493948973715305, 0.008353873156011105, 0.015676898881793022, 0.020509829744696617, -0.016105303540825844, -0.015650125220417976, 0.010515973903238773, -0.030175691470503807, 0.03204995393753052, -0.0017805531388148665, 0.0056227995082736015, 0.040136076509952545, -0.0022223445121198893, 0.0030105405021458864, 0.022866051644086838, 0.013668757863342762, 0.021808428689837456, -0.012336689978837967, 0.024378851056098938, 0.03954702243208885, -0.0028113997541368008, 0.025664063170552254, -0.00548222940415144, 0.021768266335129738, -0.010094263590872288, 0.0003871950029861182, 0.0241780374199152, -0.005867123603820801, 0.019559308886528015, -0.000377781834686175, 0.001261782948859036, -0.015730449929833412, -0.002237405627965927, -0.007162375375628471, -0.02146035060286522, 0.0009747859439812601, 0.0026674827095121145, -0.0057165129110217094, 0.008655094541609287, -0.022544747218489647, -0.011131804436445236, -0.01958608441054821, 0.02856917679309845, 0.012336689978837967, 0.011801185086369514, 0.018916703760623932, -0.0066201770678162575, 0.014659442007541656, 0.004689013119786978, -0.01013442687690258, -0.03515588492155075, 0.010054100304841995, -0.004340935032814741, 0.026025528088212013, -0.013019458390772343, -0.005673002917319536, 0.011312536895275116, 0.0013747409684583545, -0.00547218881547451, 7.080794603098184e-05, -0.0010944376699626446, 0.01607852801680565, 0.008929540403187275, -0.02172810398042202, 0.00571985961869359, 0.003490821458399296, 0.012283138930797577, 0.025463249534368515, 0.0025536881294101477, 0.011185354553163052, -0.017992958426475525, 
0.010930989868938923, 0.02230377122759819, -0.023321229964494705, 0.0025202189572155476, 0.012390240095555782, -0.03170187771320343, -0.003520943457260728, -0.011841347441077232, 0.02370947040617466, 0.007282864302396774, 0.01650693267583847, 0.013193497434258461, -0.013949898071587086, -0.010355322621762753, 0.036066241562366486, -0.03818148374557495, -0.015275271609425545, 0.005187701899558306, -0.018889928236603737, -0.017618104815483093, 0.006600095424801111, -0.01665419526398182, 0.00783175602555275, 0.018622176721692085, -0.015061070211231709, -0.019974324852228165, 0.005164273548871279, -2.9782220735796727e-05, 0.013012764044106007, -0.03906506672501564, 0.015502861700952053, 0.005204436369240284, 0.006499688606709242, -0.003090866142883897, -0.0010735195828601718, -0.01049589179456234, 0.0033569452352821827, -0.0045149740763008595, 0.020978396758437157, 0.009210680611431599, 0.014846867881715298, -0.005047131795436144, 0.013802633620798588, -0.010904214344918728, 0.016774684190750122, -0.011325924657285213, -0.0029034395702183247, -0.001386455143801868, -0.006041162647306919, -0.003771961433812976, -0.02480725571513176, -0.02579793892800808, -0.0007149824523366988, -0.002642381004989147, -0.030041813850402832, -0.027498167008161545, 0.009779654443264008, -0.0185418501496315, -0.021607615053653717, -0.005960837006568909, 0.0074836784042418, -0.0010919275227934122, -0.01571706309914589, 0.01543592382222414, -0.004866398870944977, -0.02208956889808178, 0.01602497696876526, 0.0035744940396398306, -0.02779269404709339, -0.01475315447896719, 0.009833205491304398, -0.010268302634358406, 0.04099288582801819, 0.013461249880492687, 0.006600095424801111, -0.027873020619153976, 0.0012266404228284955, -0.013949898071587086, -0.0015337190125137568, -0.0008810725994408131, 0.03740500286221504, 0.017015662044286728, -0.02878337912261486, 0.01376247126609087, 0.016627419739961624, 0.011607064865529537, -0.007389965001493692, -0.013166721910238266, -0.02532937191426754, -0.007021805737167597, 0.018394585698843002, 0.016105303540825844, 0.004120039287954569, 0.014994132332503796, -0.02423158846795559, 0.020871296525001526, -0.0006279629305936396, -0.007784899789839983, -0.01801973208785057, 0.009813123382627964, 0.012597748078405857, 0.030148915946483612, 0.0088559091091156, 0.00596753042191267, 0.0033619655296206474, 0.02862272784113884, 0.011265680193901062, 0.011138497851788998, 0.02214311994612217, -0.010455729439854622, -0.01828748546540737, -0.03842246159911156, 0.009752878919243813, -0.031621553003787994, 0.0212996993213892, 0.0025720959529280663, -0.005709819030016661, -0.027350902557373047, -0.02632005698978901, -0.03234448283910751, 0.009712716564536095, 0.018394585698843002, -0.009732797741889954, 0.030363118276000023, -0.010054100304841995, -0.016828235238790512, 0.011613758280873299, 0.016855010762810707, 0.017216475680470467, -0.008273547515273094, 0.004893174394965172, 0.0032967007718980312, -0.0019311638316139579, 0.011680696159601212, 0.010857357643544674, -0.0015220048371702433, 8.377720223506913e-05, 0.01875605247914791, 0.015368985012173653, 0.031353797763586044, -0.01013442687690258, -0.02167455293238163, 0.0024649950210005045, -0.0015939632430672646, 0.04184969142079353, 0.004638809245079756, 0.02615940570831299, 0.020228689536452293, 0.016373055055737495, -0.001106151845306158, 0.02574438974261284, -0.031675104051828384, 0.0442059151828289, 0.00973949208855629, 0.030416667461395264, 0.013695533387362957, 0.00031586410477757454, 0.002749481936916709, -0.0013362516183406115, 
0.008153058588504791, 0.01760471612215042, -0.03510233387351036, -0.0022072833962738514, 0.02083113230764866, 0.014659442007541656, 0.02575777657330036, 0.033549368381500244, 0.03060409426689148, 0.01654709503054619, -0.017511002719402313, -0.007543922867625952, 0.0015379026299342513, -0.010462422855198383, 0.007677799090743065, -0.0044681173749268055, -0.01812683418393135, 0.0018374505452811718, -0.017926020547747612, 0.009993856772780418, 0.00771796191111207, 0.031675104051828384, 0.022892825305461884, -0.004879786632955074, 0.015181559138000011, 0.0022223445121198893, 0.003467393107712269, -0.00917051825672388, -0.03413842245936394, 0.02721702679991722, 0.0240307729691267, -0.014900418929755688, -0.003497515106573701, -0.010462422855198383, -0.021594226360321045, -0.021085496991872787, 0.019452208653092384, -0.01739051565527916, -0.007624248508363962, -0.008688563480973244, 0.029800837859511375, -0.004983540624380112, -0.016051752492785454, 0.030684420838952065, -0.01376247126609087, 0.017899245023727417, -0.0014584135496988893, 0.005458801053464413, -0.001113682403229177, -0.022999927401542664, -0.0038388995453715324, 0.008782276883721352, -0.0030590705573558807, 0.012624523602426052, -0.011807878501713276, 0.023200741037726402, -0.017939407378435135, 0.01827409863471985, -0.009839898906648159, -0.013461249880492687, 0.010382097214460373, 0.002767889993265271, -0.003795389784500003, -0.02741784043610096, -0.014378301799297333, 0.004387791734188795, -0.012082325294613838, -0.002431526081636548, -0.024419015273451805, -0.04466109350323677, -0.016573870554566383, 0.13719630241394043, 0.02590504102408886, -0.00403301976621151, 0.007021805737167597, -0.006486300844699144, 0.0037083702627569437, -0.003395434468984604, -0.004461423493921757, 0.011332618072628975, -0.018461523577570915, 0.002367934910580516, 0.009324475191533566, -0.0032833132427185774, -0.003731798380613327, 0.012517422437667847, 0.003226415952667594, 0.018822990357875824, -0.025677450001239777, -0.010060794651508331, -0.013990060426294804, -0.01472637988626957, -0.005027050152420998, 0.021821817383170128, 0.032826438546180725, -0.02428513765335083, -0.01634628139436245, 0.031246699392795563, 0.026306668296456337, 0.012691461481153965, 0.003889102954417467, -0.002913480391725898, 0.014980744570493698, 0.02241087146103382, -0.0004777706053573638, -0.02302670292556286, -0.002781277522444725, 0.017162924632430077, -0.033817119896411896, 0.023227516561746597, 0.016413219273090363, 0.013153334148228168, 9.360873082187027e-05, 0.004320853389799595, -0.01154012605547905, 0.029907938092947006, -0.01634628139436245, 0.009157130494713783, 0.0020901416428387165, 0.01021475251764059, 0.0007053600857034326, 0.016948724165558815, -0.006663686595857143, -0.0106498496606946, -0.012939132750034332, 0.0024951172526925802, 0.012544197961688042, -0.0002017555816564709, -0.005006968975067139, -0.019238006323575974, 0.02329445444047451, -0.026909111067652702, -0.03411164879798889, 0.002063366584479809, -0.01650693267583847, 0.005686390679329634, -0.019666410982608795, -0.0056930845603346825, 0.003350251354277134, -0.0167612973600626, -0.013220272958278656, -0.006221895571798086, -0.008420811034739017, -0.03834213688969612, 0.02459305338561535, 0.009444964118301868, 0.011004622094333172, 0.03293353691697121, 0.0016491871792823076, 0.005070560146123171, -0.0012902315938845277, 0.006767440587282181, -0.042278096079826355, -0.0022859356831759214, 0.004946724511682987, -0.0013019457692280412, 0.00628213956952095, -0.01822054758667946, 
-0.00854129996150732, -0.02433868870139122, 0.037726305425167084, -0.00562949338927865, 0.0016885133227333426, 0.014619278721511364, -0.02183520421385765, -0.002321078209206462, 0.01796618290245533, 0.024218199774622917, 0.018033120781183243, -0.002704298822209239, -0.006185079459100962, 0.015904489904642105, -0.030041813850402832, -0.016908559948205948, -0.0203224029392004, -0.005957489833235741, -0.016373055055737495, 0.0074769845232367516, 0.02590504102408886, -0.01289896946400404, -0.011098334565758705, -0.005438719876110554, -0.011607064865529537, 0.0039058374240994453, 0.017484229058027267, -0.004863052163273096, 0.0024716889020055532, 0.01947898417711258, 0.007222619839012623, 0.001441679080016911, -0.02365592122077942, 0.0056897373870015144, -0.018367810174822807, 0.035798490047454834, 0.02194230444729328, -0.0063256495632231236, -0.008661787956953049, 0.006837725639343262, -0.021487126126885414, 0.018207158893346786, 0.0043978323228657246, 0.002235732041299343, 0.020603543147444725, -0.012269752100110054, -0.022009244188666344, -0.011238904669880867, -0.01645338162779808, -0.014445239678025246, 0.021540677174925804, 0.009913531132042408, 0.008159752935171127, -0.014485402964055538, -0.011707471683621407, -0.00022989050194155425, -0.04701731353998184, 0.014405076391994953, -0.014699604362249374, 0.006265405099838972, 0.000786940916441381, -0.01755116693675518, 0.0030791519675403833, -0.030577318742871284, -0.007256088778376579, -0.024834031239151955, -0.0010777032002806664, -0.0423048697412014, -0.021179210394620895, -0.0007501249783672392, -0.026547646149992943, 0.03692304715514183, 0.02684217318892479, 0.019345106557011604, 0.0041702426970005035, -0.012055549770593643, 0.0120890187099576, 0.01522172149270773, 0.01645338162779808, -0.007008417975157499, 0.023588981479406357, -0.009953693486750126, 0.04289392754435539, 0.031996406614780426, 0.018247323110699654, -0.028488850221037865, 0.008869296871125698, 0.008581462316215038, 0.02084452100098133, -0.028194323182106018, -0.004401179030537605, -0.011198742315173149, -0.022076182067394257, -0.023856734856963158, -0.008835827000439167, -0.002734420821070671, -0.0035811876878142357, -0.014284588396549225, 7.746252776996698e-06, 0.04931998252868652, -0.012450484558939934, 0.029185006394982338, -0.011894898489117622, 0.02167455293238163, -0.015047682449221611, -0.004223793279379606, -0.008849214762449265, -0.014927193522453308, -0.02057676762342453, -0.04626760631799698, 0.0051709674298763275, 0.03373679518699646, -0.013320679776370525, 0.009023253805935383, -0.0013772511156275868, -0.010382097214460373, -0.015168171375989914, 0.013521494343876839, 0.010669930838048458, -0.018608788028359413, -0.018501687794923782, 0.016828235238790512, -0.019974324852228165, -0.00033385370625182986, -0.00965916644781828, -0.027190251275897026, -0.029907938092947006, 0.0012400280684232712, 0.0006639421335421503, 0.01015450805425644, 0.010837276466190815, -0.007597472984343767, -0.015128008089959621, -0.027297353371977806, -0.014364914037287235, 0.008782276883721352, -0.005820266902446747, 0.011272373609244823, 0.007543922867625952, 0.00016619471716694534, -0.013789246790111065, 0.02172810398042202, 0.033549368381500244, 0.004357669502496719, 0.005398556590080261, 0.02700282447040081, -0.013775859028100967, -0.0007513800519518554, 0.00041815388249233365, 0.006379199679940939, -0.016774684190750122, -0.03071119636297226, 0.024271750822663307, 0.018836377188563347, -0.012992682866752148, -0.017002273350954056, -0.0008354710298590362, -0.018140221014618874, 
-0.010254914872348309, -0.01480670552700758, 0.02518210932612419, -0.001659227884374559, -0.010984539985656738, -0.020282240584492683, -0.004571871366351843, -0.006262058392167091, 0.005890551954507828, 0.02255813591182232, -0.01587771438062191, 0.011098334565758705, -0.0019261435372754931, 0.00572990020737052, 0.00644948473200202, -0.01433813851326704, 0.03164832666516304, -0.01827409863471985, 0.0040397136472165585, 0.0010484177619218826, 0.020697256550192833, -0.031086048111319542, 0.0005011989269405603, 0.024820642545819283, 0.024298526346683502, 0.0009639085037633777, 0.004568524658679962, -0.012343383394181728, -0.0011270700488239527, -0.01728341355919838, -0.007938857190310955, -0.026239730417728424, -0.020483054220676422, 0.00014914642088115215, 0.0016567177372053266, 0.007851837202906609, -0.0022240178659558296, -0.034754253923892975, -0.0017253292025998235, -0.003218048717826605, -0.019438819959759712, -0.016279341652989388, -0.018582012504339218, 0.025396311655640602, -0.0009371332707814872, -0.017484229058027267, -0.02178165316581726, -0.0014542299322783947, 0.027444615960121155, -0.004106651525944471, 0.009578839875757694, 0.021072110161185265, 0.003062417497858405, -0.027042988687753677, 0.01522172149270773, -0.038877639919519424, 0.007851837202906609, -0.03547718748450279, -0.005974224302917719, -0.03279966115951538, -0.013909734785556793, 0.00917051825672388, -0.002953643212094903, -0.025918427854776382, -0.020857907831668854, -0.007577391806989908, 0.0018910010112449527, 0.0018290833104401827, -0.017403902485966682, -0.006459525786340237, -0.003008867148309946, -0.00241646496579051, -0.013963285833597183, -0.01980028674006462, 0.05140845105051994, -0.016640808433294296, -0.005783450789749622, 0.0005053825443610549, -0.02532937191426754, -0.009799735620617867, 0.00089613365707919, 0.010763644240796566, 0.012537503615021706, -0.01013442687690258, -0.02266523614525795, -0.010623074136674404, 0.022705400362610817, -0.036949824541807175, -0.03055054321885109, -0.0149673568084836, 0.004394485615193844, -0.02037595398724079, 0.004702400416135788, 0.008547993376851082, -0.012932438403367996, 0.020014489069581032, 0.01303284615278244, 0.01488703116774559, -0.012517422437667847, -0.010040713474154472, -0.01602497696876526, 0.004357669502496719, -0.015342210419476032, -0.013073008507490158, -0.03306741639971733, -0.017939407378435135, 0.027096537873148918, -8.273129060398787e-05, -0.014458627440035343, -0.009726104326546192, -0.020242078229784966, -0.023776408284902573, -0.00950520858168602, -0.03175542876124382, 0.002734420821070671, 0.031166374683380127, 0.02356220781803131, 0.004628768656402826, 0.024164650589227676, -0.011714165098965168, 0.023120416328310966, -0.00443799514323473, -0.0036749010905623436, 0.01927816867828369, -0.037056926637887955, 0.036066241562366486, 0.0077514308504760265, -0.0211524348706007, -0.0005325761740095913, 0.009304394014179707, -0.0036347382701933384, 0.029238557443022728, 0.01613207906484604, -0.0362536683678627, 0.0003723431145772338, 0.0048965211026370525, 0.0051709674298763275, 0.011680696159601212, 0.006784175522625446, 0.0164935439825058, -0.0384492389857769, -0.023388167843222618, -0.0013287210604175925, -0.0023545471485704184, -0.008574768900871277, -0.01755116693675518, 0.01281864382326603, 0.0014215976698324084, 5.653130938299e-05, -0.015757225453853607, -0.001877613365650177, 0.03665529564023018, -0.01921123079955578, 0.028087222948670387, 0.015636736527085304, -0.009257537312805653, 0.018582012504339218, 0.02725718915462494, 
...(remaining embedding vector values elided)...], 'id': '0c906ab3-3786-477f-b13a-5a98367ceee6', '_distance': 0.4137815535068512}\n" + ] } ], + "source": [ + "print(docs[0].metadata)" + ] } ], "metadata": { @@ -215,7 +313,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.6" + "version": "3.11.5" } }, "nbformat": 4, diff --git 
a/docs/docs/integrations/vectorstores/redis.ipynb b/docs/docs/integrations/vectorstores/redis.ipynb index 666a520a34625..54bfb2b8fcf65 100644 --- a/docs/docs/integrations/vectorstores/redis.ipynb +++ b/docs/docs/integrations/vectorstores/redis.ipynb @@ -6,26 +6,26 @@ "source": [ "# Redis\n", "\n", - "Redis vector database introduction and langchain integration guide.\n", + ">[Redis vector database](https://redis.io/docs/get-started/vector-database/) introduction and langchain integration guide.\n", "\n", "## What is Redis?\n", "\n", - "Most developers from a web services background are probably familiar with Redis. At it's core, Redis is an open-source key-value store that can be used as a cache, message broker, and database. Developers choose Redis because it is fast, has a large ecosystem of client libraries, and has been deployed by major enterprises for years.\n", + "Most developers from a web services background are familiar with `Redis`. At its core, `Redis` is an open-source key-value store that is used as a cache, message broker, and database. Developers choose `Redis` because it is fast, has a large ecosystem of client libraries, and has been deployed by major enterprises for years.\n", "\n", - "On top of these traditional use cases, Redis provides additional capabilities like the Search and Query capability that allows users to create secondary index structures within Redis. This allows Redis to be a Vector Database, at the speed of a cache. \n", + "On top of these traditional use cases, `Redis` provides additional capabilities like the Search and Query capability that allows users to create secondary index structures within `Redis`. This allows `Redis` to be a Vector Database, at the speed of a cache. \n", "\n", "\n", "## Redis as a Vector Database\n", "\n", - "Redis uses compressed, inverted indexes for fast indexing with a low memory footprint. It also supports a number of advanced features such as:\n", + "`Redis` uses compressed, inverted indexes for fast indexing with a low memory footprint. It also supports a number of advanced features such as:\n", "\n", - "* Indexing of multiple fields in Redis hashes and JSON\n", - "* Vector similarity search (with HNSW (ANN) or FLAT (KNN))\n", + "* Indexing of multiple fields in Redis hashes and `JSON`\n", + "* Vector similarity search (with `HNSW` (ANN) or `FLAT` (KNN))\n", "* Vector Range Search (e.g. 
find all vectors within a radius of a query vector)\n", "* Incremental indexing without performance loss\n", "* Document ranking (using [tf-idf](https://en.wikipedia.org/wiki/Tf%E2%80%93idf), with optional user-provided weights)\n", "* Field weighting\n", - "* Complex boolean queries with AND, OR, and NOT operators\n", + "* Complex boolean queries with `AND`, `OR`, and `NOT` operators\n", "* Prefix matching, fuzzy matching, and exact-phrase queries\n", "* Support for [double-metaphone phonetic matching](https://redis.io/docs/stack/search/reference/phonetic_matching/)\n", "* Auto-complete suggestions (with fuzzy prefix suggestions)\n", @@ -34,7 +34,7 @@ "* Numeric filters and ranges\n", "* Geospatial searches using Redis geospatial indexing\n", "* A powerful aggregations engine\n", - "* Supports for all utf-8 encoded text\n", + "* Supports for all `utf-8` encoded text\n", "* Retrieve full documents, selected fields, or only the document IDs\n", "* Sorting results (for example, by creation date)\n", "\n", @@ -42,7 +42,7 @@ "\n", "## Clients\n", "\n", - "Since redis is much more than just a vector database, there are often use cases that demand usage of a Redis client besides just the langchain integration. You can use any standard Redis client library to run Search and Query commands, but it's easiest to use a library that wraps the Search and Query API. Below are a few examples, but you can find more client libraries [here](https://redis.io/resources/clients/).\n", + "Since `Redis` is much more than just a vector database, there are often use cases that demand the usage of a `Redis` client besides just the `LangChain` integration. You can use any standard `Redis` client library to run Search and Query commands, but it's easiest to use a library that wraps the Search and Query API. Below are a few examples, but you can find more client libraries [here](https://redis.io/resources/clients/).\n", "\n", "| Project | Language | License | Author | Stars |\n", "|----------|---------|--------|---------|-------|\n", @@ -87,7 +87,7 @@ "[redisearch-api-rs-stars]: https://img.shields.io/github/stars/RediSearch/redisearch-api-rs.svg?style=social&label=Star&maxAge=2592000\n", "\n", "\n", - "## Deployment Options\n", + "## Deployment options\n", "\n", "There are many ways to deploy Redis with RediSearch. The easiest way to get started is to use Docker, but there are are many potential options for deployment such as\n", "\n", @@ -98,7 +98,7 @@ "- Kubernetes: [Redis Enterprise Software on Kubernetes](https://docs.redis.com/latest/kubernetes/)\n", "\n", "\n", - "## Examples\n", + "## Additional examples\n", "\n", "Many examples can be found in the [Redis AI team's GitHub](https://github.com/RedisVentures/)\n", "\n", @@ -108,7 +108,7 @@ "- [Vector Search on Azure](https://learn.microsoft.com/azure/azure-cache-for-redis/cache-tutorial-vector-similarity) - Vector search on Azure using Azure Cache for Redis and Azure OpenAI\n", "\n", "\n", - "## More Resources\n", + "## More resources\n", "\n", "For more information on how to use Redis as a vector database, check out the following resources:\n", "\n", @@ -122,9 +122,12 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Install Redis Python Client\n", + "## Setting up\n", "\n", - "Redis-py is the officially supported client by Redis. Recently released is the RedisVL client which is purpose-built for the Vector Database use cases. Both can be installed with pip." 
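These clients back the LangChain `Redis` vector store that the rest of the notebook builds on. A minimal end-to-end sketch, assuming a local Redis Stack container on the default port, the `langchain-openai` package, and an `OPENAI_API_KEY` in the environment (none of these are set up by this hunk itself):

```python
# Rough sketch only: index a few texts in Redis via LangChain and query them.
# Assumes `pip install redis langchain-community langchain-openai` and a local
# Redis Stack instance reachable at redis://localhost:6379.
from langchain_community.vectorstores.redis import Redis
from langchain_openai import OpenAIEmbeddings

texts = [
    "Redis is an in-memory key-value store.",
    "RediSearch adds secondary indexing and vector similarity search to Redis.",
]

vector_store = Redis.from_texts(
    texts,
    OpenAIEmbeddings(),
    redis_url="redis://localhost:6379",
    index_name="example_index",  # hypothetical index name
)

# Return the single most similar document for a query.
docs = vector_store.similarity_search("What adds vector search to Redis?", k=1)
print(docs[0].page_content)
```

The same `redis_url` string formats described below for standalone, Sentinel, and TLS connections can be passed in place of the localhost URL.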
+ "\n", + "### Install Redis Python client\n", + "\n", + "`Redis-py` is the officially supported client by Redis. Recently released is the `RedisVL` client which is purpose-built for the Vector Database use cases. Both can be installed with pip." ] }, { @@ -172,7 +175,63 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Sample Data\n", + "### Deploy Redis locally\n", + "\n", + "To locally deploy Redis, run:\n", + "\n", + "```console\n", + "docker run -d -p 6379:6379 -p 8001:8001 redis/redis-stack:latest\n", + "```\n", + "If things are running correctly you should see a nice Redis UI at `http://localhost:8001`. See the [Deployment options](#deployment-options) section above for other ways to deploy.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Redis connection Url schemas\n", + "\n", + "Valid Redis Url schemas are:\n", + "1. `redis://` - Connection to Redis standalone, unencrypted\n", + "2. `rediss://` - Connection to Redis standalone, with TLS encryption\n", + "3. `redis+sentinel://` - Connection to Redis server via Redis Sentinel, unencrypted\n", + "4. `rediss+sentinel://` - Connection to Redis server via Redis Sentinel, booth connections with TLS encryption\n", + "\n", + "More information about additional connection parameters can be found in the [redis-py documentation](https://redis-py.readthedocs.io/en/stable/connections.html)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# connection to redis standalone at localhost, db 0, no password\n", + "redis_url = \"redis://localhost:6379\"\n", + "# connection to host \"redis\" port 7379 with db 2 and password \"secret\" (old style authentication scheme without username / pre 6.x)\n", + "redis_url = \"redis://:secret@redis:7379/2\"\n", + "# connection to host redis on default port with user \"joe\", pass \"secret\" using redis version 6+ ACLs\n", + "redis_url = \"redis://joe:secret@redis/0\"\n", + "\n", + "# connection to sentinel at localhost with default group mymaster and db 0, no password\n", + "redis_url = \"redis+sentinel://localhost:26379\"\n", + "# connection to sentinel at host redis with default port 26379 and user \"joe\" with password \"secret\" with default group mymaster and db 0\n", + "redis_url = \"redis+sentinel://joe:secret@redis\"\n", + "# connection to sentinel, no auth with sentinel monitoring group \"zone-1\" and database 2\n", + "redis_url = \"redis+sentinel://redis:26379/zone-1/2\"\n", + "\n", + "# connection to redis standalone at localhost, db 0, no password but with TLS support\n", + "redis_url = \"rediss://localhost:6379\"\n", + "# connection to redis sentinel at localhost and default port, db 0, no password\n", + "# but with TLS support for booth Sentinel and Redis server\n", + "redis_url = \"rediss+sentinel://localhost\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Sample data\n", "\n", "First we will describe some sample data so that the various attributes of the Redis vector store can be demonstrated." ] @@ -222,13 +281,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Initializing Redis\n", - "\n", - "To locally deploy Redis, run:\n", - "```console\n", - "docker run -d -p 6379:6379 -p 8001:8001 redis/redis-stack:latest\n", - "```\n", - "If things are running correctly you should see a nice Redis UI at http://localhost:8001. 
See the [Deployment Options](#deployment-options) section above for other ways to deploy.\n", + "### Create Redis vector store\n", "\n", "The Redis VectorStore instance can be initialized in a number of ways. There are multiple class methods that can be used to initialize a Redis VectorStore instance.\n", "\n", @@ -284,7 +337,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Inspecting the Created Index\n", + "## Inspecting the created Index\n", "\n", "Once the ``Redis`` VectorStore object has been constructed, an index will have been created in Redis if it did not already exist. The index can be inspected with both the ``rvl``and the ``redis-cli`` command line tool. If you installed ``redisvl`` above, you can use the ``rvl`` command line tool to inspect the index." ] @@ -615,7 +668,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Connect to an Existing Index\n", + "## Connect to an existing Index\n", "\n", "In order to have the same metadata indexed when using the ``Redis`` VectorStore. You will need to have the same ``index_schema`` passed in either as a path to a yaml file or as a dictionary. The following shows how to obtain the schema from an index and connect to an existing index." ] @@ -730,9 +783,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Custom Metadata Indexing\n", + "## Custom metadata indexing\n", "\n", - "In some cases, you may want to control what fields the metadata maps to. For example, you may want the ``credit_score`` field to be a categorical field instead of a text field (which is the default behavior for all string fields). In this case, you can use the ``index_schema`` parameter in each of the initialization methods above to specify the schema for the index. Custom index schema can either be passed as a dictionary or as a path to a yaml file.\n", + "In some cases, you may want to control what fields the metadata maps to. For example, you may want the ``credit_score`` field to be a categorical field instead of a text field (which is the default behavior for all string fields). In this case, you can use the ``index_schema`` parameter in each of the initialization methods above to specify the schema for the index. Custom index schema can either be passed as a dictionary or as a path to a YAML file.\n", "\n", "All arguments in the schema have defaults besides the name, so you can specify only the fields you want to change. All the names correspond to the snake/lowercase versions of the arguments you would use on the command line with ``redis-cli`` or in ``redis-py``. For more on the arguments for each field, see the [documentation](https://redis.io/docs/interact/search-and-query/basic-constructs/field-and-type-options/)\n", "\n", @@ -749,7 +802,7 @@ " - name: age\n", "```\n", "\n", - "In Python this would look like:\n", + "In Python, this would look like:\n", "\n", "```python\n", "\n", @@ -810,16 +863,16 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Hybrid Filtering\n", + "## Hybrid filtering\n", "\n", - "With the Redis Filter Expression language built into langchain, you can create arbitrarily long chains of hybrid filters\n", + "With the Redis Filter Expression language built into LangChain, you can create arbitrarily long chains of hybrid filters\n", "that can be used to filter your search results. 
The expression language is derived from the [RedisVL Expression Syntax](https://redisvl.com)\n", "and is designed to be easy to use and understand.\n", "\n", "The following are the available filter types:\n", "- ``RedisText``: Filter by full-text search against metadata fields. Supports exact, fuzzy, and wildcard matching.\n", "- ``RedisNum``: Filter by numeric range against metadata fields.\n", - "- ``RedisTag``: Filter by exact match against string based categorical metadata fields. Multiple tags can be specified like \"tag1,tag2,tag3\".\n", + "- ``RedisTag``: Filter by the exact match against string-based categorical metadata fields. Multiple tags can be specified like \"tag1,tag2,tag3\".\n", "\n", "The following are examples of utilizing these filters.\n", "\n", @@ -858,7 +911,7 @@ "job_starts_with_eng = RedisFilter.text(\"job\") % \"eng*\"\n", "```\n", "\n", - "The following are examples of using hybrid filter for search" + "The following are examples of using a hybrid filter for search" ] }, { @@ -1150,7 +1203,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Delete keys" + "## Delete keys and index" ] }, { @@ -1209,48 +1262,6 @@ " redis_url=\"redis://localhost:6379\",\n", ")" ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Redis connection Url examples\n", - "\n", - "Valid Redis Url scheme are:\n", - "1. `redis://` - Connection to Redis standalone, unencrypted\n", - "2. `rediss://` - Connection to Redis standalone, with TLS encryption\n", - "3. `redis+sentinel://` - Connection to Redis server via Redis Sentinel, unencrypted\n", - "4. `rediss+sentinel://` - Connection to Redis server via Redis Sentinel, booth connections with TLS encryption\n", - "\n", - "More information about additional connection parameter can be found in the redis-py documentation at https://redis-py.readthedocs.io/en/stable/connections.html" - ] - }, - { - "cell_type": "code", - "execution_count": 35, - "metadata": {}, - "outputs": [], - "source": [ - "# connection to redis standalone at localhost, db 0, no password\n", - "redis_url = \"redis://localhost:6379\"\n", - "# connection to host \"redis\" port 7379 with db 2 and password \"secret\" (old style authentication scheme without username / pre 6.x)\n", - "redis_url = \"redis://:secret@redis:7379/2\"\n", - "# connection to host redis on default port with user \"joe\", pass \"secret\" using redis version 6+ ACLs\n", - "redis_url = \"redis://joe:secret@redis/0\"\n", - "\n", - "# connection to sentinel at localhost with default group mymaster and db 0, no password\n", - "redis_url = \"redis+sentinel://localhost:26379\"\n", - "# connection to sentinel at host redis with default port 26379 and user \"joe\" with password \"secret\" with default group mymaster and db 0\n", - "redis_url = \"redis+sentinel://joe:secret@redis\"\n", - "# connection to sentinel, no auth with sentinel monitoring group \"zone-1\" and database 2\n", - "redis_url = \"redis+sentinel://redis:26379/zone-1/2\"\n", - "\n", - "# connection to redis standalone at localhost, db 0, no password but with TLS support\n", - "redis_url = \"rediss://localhost:6379\"\n", - "# connection to redis sentinel at localhost and default port, db 0, no password\n", - "# but with TLS support for booth Sentinel and Redis server\n", - "redis_url = \"rediss+sentinel://localhost\"" - ] } ], "metadata": { @@ -1269,7 +1280,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.1" + "version": "3.10.12" } }, "nbformat": 4, diff --git 
a/docs/docs/integrations/vectorstores/singlestoredb.ipynb b/docs/docs/integrations/vectorstores/singlestoredb.ipynb index fd20785ad45a2..6cae0d544241e 100644 --- a/docs/docs/integrations/vectorstores/singlestoredb.ipynb +++ b/docs/docs/integrations/vectorstores/singlestoredb.ipynb @@ -106,6 +106,14 @@ "print(docs[0].page_content)" ] }, + { + "cell_type": "markdown", + "id": "035cba66", + "metadata": {}, + "source": [ + "Enhance your search efficiency with SingleStore DB version 8.5 or above by leveraging [ANN vector indexes](https://docs.singlestore.com/cloud/reference/sql-reference/vector-functions/vector-indexing/). By setting `use_vector_index=True` during vector store object creation, you can activate this feature. Additionally, if your vectors differ in dimensionality from the default OpenAI embedding size of 1536, ensure to specify the `vector_size` parameter accordingly. " + ] + }, { "cell_type": "code", "execution_count": null, diff --git a/docs/docs/integrations/vectorstores/vikingdb.ipynb b/docs/docs/integrations/vectorstores/vikingdb.ipynb index 66ab177efd56b..aa8726966ab61 100644 --- a/docs/docs/integrations/vectorstores/vikingdb.ipynb +++ b/docs/docs/integrations/vectorstores/vikingdb.ipynb @@ -220,7 +220,7 @@ "collapsed": false }, "source": [ - "After retreival you can go on querying it as usual." + "After retrieval you can go on querying it as usual." ] } ], diff --git a/docs/docs/modules/data_connection/indexing.ipynb b/docs/docs/modules/data_connection/indexing.ipynb index b888a77958b87..45a5d92a42bc9 100644 --- a/docs/docs/modules/data_connection/indexing.ipynb +++ b/docs/docs/modules/data_connection/indexing.ipynb @@ -60,7 +60,7 @@ " * document addition by id (`add_documents` method with `ids` argument)\n", " * delete by id (`delete` method with `ids` argument)\n", "\n", - "Compatible Vectorstores: `AnalyticDB`, `AstraDB`, `AwaDB`, `Bagel`, `Cassandra`, `Chroma`, `DashVector`, `DatabricksVectorSearch`, `DeepLake`, `Dingo`, `ElasticVectorSearch`, `ElasticsearchStore`, `FAISS`, `HanaDB`, `Milvus`, `MyScale`, `PGVector`, `Pinecone`, `Qdrant`, `Redis`, `ScaNN`, `SupabaseVectorStore`, `SurrealDBStore`, `TimescaleVector`, `Vald`, `Vearch`, `VespaStore`, `Weaviate`, `ZepVectorStore`.\n", + "Compatible Vectorstores: `AnalyticDB`, `AstraDB`, `AwaDB`, `Bagel`, `Cassandra`, `Chroma`, `DashVector`, `DatabricksVectorSearch`, `DeepLake`, `Dingo`, `ElasticVectorSearch`, `ElasticsearchStore`, `FAISS`, `HanaDB`, `Milvus`, `MyScale`, `PGVector`, `Pinecone`, `Qdrant`, `Redis`, `Rockset`, `ScaNN`, `SupabaseVectorStore`, `SurrealDBStore`, `TimescaleVector`, `Vald`, `Vearch`, `VespaStore`, `Weaviate`, `ZepVectorStore`.\n", " \n", "## Caution\n", "\n", diff --git a/docs/docs/modules/data_connection/vectorstores/index.mdx b/docs/docs/modules/data_connection/vectorstores/index.mdx index b1242021a27ca..3b6d12699b8c5 100644 --- a/docs/docs/modules/data_connection/vectorstores/index.mdx +++ b/docs/docs/modules/data_connection/vectorstores/index.mdx @@ -131,7 +131,7 @@ table = db.create_table( raw_documents = TextLoader('../../../state_of_the_union.txt').load() text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0) documents = text_splitter.split_documents(raw_documents) -db = LanceDB.from_documents(documents, OpenAIEmbeddings(), connection=table) +db = LanceDB.from_documents(documents, OpenAIEmbeddings()) ``` diff --git a/docs/docs/modules/model_io/llms/index.mdx b/docs/docs/modules/model_io/llms/index.mdx index 8e7a1e95dabb4..ead4a3e1b9f03 100644 --- 
a/docs/docs/modules/model_io/llms/index.mdx +++ b/docs/docs/modules/model_io/llms/index.mdx @@ -26,4 +26,4 @@ This includes: - [How to write a custom LLM class](./custom_llm) - [How to cache LLM responses](./llm_caching) - [How to stream responses from an LLM](./streaming_llm) -- [How to track token usage in an LLM call)(./token_usage_tracking) +- [How to track token usage in an LLM call](./token_usage_tracking) diff --git a/docs/docs/use_cases/graph/graph_networkx_qa.ipynb b/docs/docs/use_cases/graph/graph_networkx_qa.ipynb index cb70b8718c0e9..a01d7c1127639 100644 --- a/docs/docs/use_cases/graph/graph_networkx_qa.ipynb +++ b/docs/docs/use_cases/graph/graph_networkx_qa.ipynb @@ -298,12 +298,34 @@ "id": "045796cf", "metadata": {}, "outputs": [], - "source": [] + "source": [ + "loaded_graph.get_number_of_nodes()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d24b3407", + "metadata": {}, + "outputs": [], + "source": [ + "loaded_graph.clear_edges()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b8afda51", + "metadata": {}, + "outputs": [], + "source": [ + "loaded_graph.clear()" + ] + } ], "metadata": { "kernelspec": { - "display_name": "Python 3 (ipykernel)", + "display_name": "Python 3.10.6 64-bit", "language": "python", "name": "python3" }, @@ -317,7 +339,12 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.12" + "version": "3.10.6" + }, + "vscode": { + "interpreter": { + "hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49" + } } }, "nbformat": 4, diff --git a/docs/docs/use_cases/graph/neptune_sparql_qa.ipynb b/docs/docs/use_cases/graph/neptune_sparql_qa.ipynb new file mode 100644 index 0000000000000..4c464e840d522 --- /dev/null +++ b/docs/docs/use_cases/graph/neptune_sparql_qa.ipynb @@ -0,0 +1,337 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Neptune SPARQL QA Chain\n", + "\n", + "This notebook shows the use of an LLM to query an RDF graph in Amazon Neptune. The code uses a `NeptuneRdfGraph` class that connects to the Neptune database and loads its schema. The `NeptuneSparqlQAChain` connects the graph and the LLM so you can ask natural language questions.\n", + "\n", + "Requirements for running this notebook:\n", + "- A Neptune 1.2.x cluster accessible from this notebook\n", + "- A kernel with Python 3.9 or higher\n", + "- For Bedrock access, ensure the IAM role has this policy\n", + "\n", + "```json\n", + "{\n", + " \"Action\": [\n", + " \"bedrock:ListFoundationModels\",\n", + " \"bedrock:InvokeModel\"\n", + " ],\n", + " \"Resource\": \"*\",\n", + " \"Effect\": \"Allow\"\n", + "}\n", + "```\n", + "\n", + "- An S3 bucket for staging sample data; the bucket should be in the same account/region as Neptune." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Seed W3C organizational data\n", + "The W3C org ontology plus some instances. \n", + "\n", + "You will need an S3 bucket in the same region and account. Set STAGE_BUCKET to the name of that bucket."
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "STAGE_BUCKET = \"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%%bash -s \"$STAGE_BUCKET\"\n", + "\n", + "rm -rf data\n", + "mkdir -p data\n", + "cd data\n", + "echo getting org ontology and sample org instances\n", + "wget http://www.w3.org/ns/org.ttl \n", + "wget https://raw.githubusercontent.com/aws-samples/amazon-neptune-ontology-example-blog/main/data/example_org.ttl \n", + "\n", + "echo Copying org ttl to S3\n", + "aws s3 cp org.ttl s3://$1/org.ttl\n", + "aws s3 cp example_org.ttl s3://$1/example_org.ttl\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Bulk-load the org ttl - both ontology and instances" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%load -s s3://{STAGE_BUCKET} -f turtle --store-to loadres --run" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%load_status {loadres['payload']['loadId']} --errors --details" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup Chain" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "EXAMPLES = \"\"\"\n", + "\n", + "\n", + "Find organizations.\n", + "\n", + "\n", + "\n", + "PREFIX rdf: \n", + "PREFIX rdfs: \n", + "PREFIX org: \n", + "\n", + "select ?org ?orgName where {{\n", + " ?org rdfs:label ?orgName .\n", + "}} \n", + "\n", + "\n", + "\n", + "Find sites of an organization\n", + "\n", + "\n", + "\n", + "PREFIX rdf: \n", + "PREFIX rdfs: \n", + "PREFIX org: \n", + "\n", + "select ?org ?orgName ?siteName where {{\n", + " ?org rdfs:label ?orgName .\n", + " ?org org:hasSite/rdfs:label ?siteName . \n", + "}} \n", + "\n", + "\n", + "\n", + "Find suborganizations of an organization\n", + "\n", + "\n", + "\n", + "PREFIX rdf: \n", + "PREFIX rdfs: \n", + "PREFIX org: \n", + "\n", + "select ?org ?orgName ?subName where {{\n", + " ?org rdfs:label ?orgName .\n", + " ?org org:hasSubOrganization/rdfs:label ?subName .\n", + "}} \n", + "\n", + "\n", + "\n", + "Find organizational units of an organization\n", + "\n", + "\n", + "\n", + "PREFIX rdf: \n", + "PREFIX rdfs: \n", + "PREFIX org: \n", + "\n", + "select ?org ?orgName ?unitName where {{\n", + " ?org rdfs:label ?orgName .\n", + " ?org org:hasUnit/rdfs:label ?unitName . \n", + "}} \n", + "\n", + "\n", + "\n", + "Find members of an organization. Also find their manager, or the member they report to.\n", + "\n", + "\n", + "\n", + "PREFIX org: \n", + "PREFIX foaf: \n", + "\n", + "select * where {{\n", + " ?person rdf:type foaf:Person .\n", + " ?person org:memberOf ?org .\n", + " OPTIONAL {{ ?person foaf:firstName ?firstName . }}\n", + " OPTIONAL {{ ?person foaf:family_name ?lastName . 
}}\n", + " OPTIONAL {{ ?person org:reportsTo ??manager }} .\n", + "}}\n", + "\n", + "\n", + "\n", + "\n", + "Find change events, such as mergers and acquisitions, of an organization\n", + "\n", + "\n", + "\n", + "PREFIX org: \n", + "\n", + "select ?event ?prop ?obj where {{\n", + " ?org rdfs:label ?orgName .\n", + " ?event rdf:type org:ChangeEvent .\n", + " ?event org:originalOrganization ?origOrg .\n", + " ?event org:resultingOrganization ?resultingOrg .\n", + "}}\n", + "\n", + "\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import boto3\n", + "from langchain.chains.graph_qa.neptune_sparql import NeptuneSparqlQAChain\n", + "from langchain.chat_models import BedrockChat\n", + "from langchain_community.graphs import NeptuneRdfGraph\n", + "\n", + "host = \"\"\n", + "port = \"\"\n", + "region = \"us-east-1\" # specify region\n", + "\n", + "graph = NeptuneRdfGraph(\n", + " host=host, port=port, use_iam_auth=True, region_name=region, hide_comments=True\n", + ")\n", + "\n", + "schema_elements = graph.get_schema_elements\n", + "# Optionally, you can update the schema_elements, and\n", + "# load the schema from the pruned elements.\n", + "graph.load_from_schema_elements(schema_elements)\n", + "\n", + "bedrock_client = boto3.client(\"bedrock-runtime\")\n", + "llm = BedrockChat(model_id=\"anthropic.claude-v2\", client=bedrock_client)\n", + "\n", + "chain = NeptuneSparqlQAChain.from_llm(\n", + " llm=llm,\n", + " graph=graph,\n", + " examples=EXAMPLES,\n", + " verbose=True,\n", + " top_K=10,\n", + " return_intermediate_steps=True,\n", + " return_direct=False,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Ask questions\n", + "Depends on the data we ingested above" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "chain.invoke(\"\"\"How many organizations are in the graph\"\"\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "chain.invoke(\"\"\"Are there any mergers or acquisitions\"\"\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "chain.invoke(\"\"\"Find organizations\"\"\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "chain.invoke(\"\"\"Find sites of MegaSystems or MegaFinancial\"\"\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "chain.invoke(\"\"\"Find a member who is manager of one or more members.\"\"\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "chain.invoke(\"\"\"Find five members and who their manager is.\"\"\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "chain.invoke(\n", + " \"\"\"Find org units or suborganizations of The Mega Group. 
What are the sites of those units?\"\"\"\n", + ")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.8" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/docs/docs/use_cases/question_answering/quickstart.ipynb b/docs/docs/use_cases/question_answering/quickstart.ipynb index 06138508c59c5..11b2475aec843 100644 --- a/docs/docs/use_cases/question_answering/quickstart.ipynb +++ b/docs/docs/use_cases/question_answering/quickstart.ipynb @@ -514,7 +514,7 @@ "\n", "First we need to define our logic for searching over documents. LangChain defines a [Retriever](/docs/modules/data_connection/retrievers/) interface which wraps an index that can return relevant `Documents` given a string query.\n", "\n", - "The most common type of `Retriever` is the [VectorStoreRetriever](/docs/modules/data_connection/retrievers/vectorstore), which uses the similarity search capabilities of a vector store to facillitate retrieval. Any `VectorStore` can easily be turned into a `Retriever` with `VectorStore.as_retriever()`:" + "The most common type of `Retriever` is the [VectorStoreRetriever](/docs/modules/data_connection/retrievers/vectorstore), which uses the similarity search capabilities of a vector store to facilitate retrieval. Any `VectorStore` can easily be turned into a `Retriever` with `VectorStore.as_retriever()`:" ] }, { diff --git a/docs/docs/use_cases/summarization.ipynb b/docs/docs/use_cases/summarization.ipynb index 21dc63dde8ef4..35917b747213d 100644 --- a/docs/docs/use_cases/summarization.ipynb +++ b/docs/docs/use_cases/summarization.ipynb @@ -224,7 +224,7 @@ "source": [ "## Option 1. 
Stuff\n", "\n", - "When we use `load_summarize_chain` with `chain_type=\"stuff\"`, we will use the [StuffDocumentsChain](/docs/modules/chains/document/stuff).\n", + "When we use `load_summarize_chain` with `chain_type=\"stuff\"`, we will use the [StuffDocumentsChain](https://api.python.langchain.com/en/latest/chains/langchain.chains.combine_documents.stuff.StuffDocumentsChain.html#langchain.chains.combine_documents.stuff.StuffDocumentsChain).\n", "\n", "The chain will take a list of documents, inserts them all into a prompt, and passes that prompt to an LLM:" ] diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index 29ca5618c40fa..352a225c9d0b2 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -188,7 +188,7 @@ const config = { }, { to: "/docs/contributing", - label: "Developer's guide", + label: "Contributing", }, { type: "docSidebar", diff --git a/docs/vercel.json b/docs/vercel.json index fe74510af2b88..9a1ab9d8abdc4 100644 --- a/docs/vercel.json +++ b/docs/vercel.json @@ -1,5 +1,13 @@ { "redirects": [ + { + "source": "/docs/integrations/llms/huggingface_textgen_inference", + "destination": "/docs/integrations/llms/huggingface_endpoint" + }, + { + "source": "/docs/integrations/llms/huggingface_hub", + "destination": "/docs/integrations/llms/huggingface_endpoint" + }, { "source": "/docs/integrations/llms/watsonxllm", "destination": "/docs/integrations/llms/ibm_watsonx" @@ -209,8 +217,8 @@ "destination": "/docs/langsmith/walkthrough" }, { - "source": "/docs/modules/data_connection/retrievers/self_query/:path*", - "destination": "/docs/integrations/retrievers/self_query/:path*" + "source": "/docs/modules/data_connection/retrievers/self_query/:path+", + "destination": "/docs/integrations/retrievers/self_query/:path+" }, { "source": "/docs/use_cases/more/agents/autonomous_agents/:path*", @@ -594,11 +602,7 @@ }, { "source": "/docs/integrations/cassandra", - "destination": "/docs/integrations/providers/astradb" - }, - { - "source": "/docs/integrations/providers/cassandra", - "destination": "/docs/integrations/providers/astradb" + "destination": "/docs/integrations/providers/cassandra" }, { "source": "/docs/integrations/providers/providers/semadb", @@ -608,10 +612,6 @@ "source": "/docs/integrations/vectorstores/vectorstores/semadb", "destination": "/docs/integrations/vectorstores/semadb" }, - { - "source": "/docs/integrations/vectorstores/cassandra", - "destination": "/docs/integrations/vectorstores/astradb" - }, { "source": "/docs/integrations/vectorstores/async_faiss", "destination": "/docs/integrations/vectorstores/faiss_async" diff --git a/libs/cli/.gitignore b/libs/cli/.gitignore index 1694d2bc1e914..68bc17f9ff210 100644 --- a/libs/cli/.gitignore +++ b/libs/cli/.gitignore @@ -1,3 +1,160 @@ -dist -__pycache__ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env .venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ diff --git a/libs/cli/langchain_cli/integration_template/Makefile b/libs/cli/langchain_cli/integration_template/Makefile index bed6f5bda53a8..2837e81a66a2a 100644 --- a/libs/cli/langchain_cli/integration_template/Makefile +++ b/libs/cli/langchain_cli/integration_template/Makefile @@ -5,16 +5,11 @@ all: help # Define a variable for the test file path. TEST_FILE ?= tests/unit_tests/ +integration_test integration_tests: TEST_FILE = tests/integration_tests/ -integration_tests: TEST_FILE = tests/integration_tests/ - -test integration_tests: - poetry run pytest $(TEST_FILE) - -tests: +test tests integration_test integration_tests: poetry run pytest $(TEST_FILE) - ###################### # LINTING AND FORMATTING ###################### @@ -32,7 +27,7 @@ lint lint_diff lint_package lint_tests: poetry run ruff . 
poetry run ruff format $(PYTHON_FILES) --diff poetry run ruff --select I $(PYTHON_FILES) - mkdir $(MYPY_CACHE); poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) + mkdir -p $(MYPY_CACHE); poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) format format_diff: poetry run ruff format $(PYTHON_FILES) diff --git a/libs/cli/langchain_cli/integration_template/integration_template/chat_models.py b/libs/cli/langchain_cli/integration_template/integration_template/chat_models.py index 3f60c9e6a7224..df19152454266 100644 --- a/libs/cli/langchain_cli/integration_template/integration_template/chat_models.py +++ b/libs/cli/langchain_cli/integration_template/integration_template/chat_models.py @@ -1,3 +1,4 @@ +"""__ModuleName__ chat models.""" from typing import Any, AsyncIterator, Iterator, List, Optional from langchain_core.callbacks import ( @@ -5,7 +6,7 @@ CallbackManagerForLLMRun, ) from langchain_core.language_models.chat_models import BaseChatModel -from langchain_core.messages import BaseMessage, BaseMessageChunk +from langchain_core.messages import BaseMessage from langchain_core.outputs import ChatGenerationChunk, ChatResult @@ -15,49 +16,51 @@ class Chat__ModuleName__(BaseChatModel): Example: .. code-block:: python - from __module_name__ import Chat__ModuleName__ + from langchain_core.messages import HumanMessage + from __module_name__ import Chat__ModuleName__ model = Chat__ModuleName__() - """ + model.invoke([HumanMessage(content="Come up with 10 names for a song about parrots.")]) + """ # noqa: E501 @property def _llm_type(self) -> str: """Return type of chat model.""" return "chat-__package_name_short__" - def _stream( + def _generate( self, messages: List[BaseMessage], stop: Optional[List[str]] = None, run_manager: Optional[CallbackManagerForLLMRun] = None, **kwargs: Any, - ) -> Iterator[ChatGenerationChunk]: + ) -> ChatResult: raise NotImplementedError - async def _astream( + # TODO: Implement if __model_name__ supports streaming. Otherwise delete method. + def _stream( self, messages: List[BaseMessage], stop: Optional[List[str]] = None, - run_manager: Optional[AsyncCallbackManagerForLLMRun] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, **kwargs: Any, - ) -> AsyncIterator[ChatGenerationChunk]: - yield ChatGenerationChunk( - message=BaseMessageChunk(content="Yield chunks", type="ai"), - ) - yield ChatGenerationChunk( - message=BaseMessageChunk(content=" like this!", type="ai"), - ) + ) -> Iterator[ChatGenerationChunk]: + raise NotImplementedError - def _generate( + # TODO: Implement if __model_name__ supports async streaming. Otherwise delete + # method. + async def _astream( self, messages: List[BaseMessage], stop: Optional[List[str]] = None, - run_manager: Optional[CallbackManagerForLLMRun] = None, + run_manager: Optional[AsyncCallbackManagerForLLMRun] = None, **kwargs: Any, - ) -> ChatResult: + ) -> AsyncIterator[ChatGenerationChunk]: raise NotImplementedError + # TODO: Implement if __model_name__ supports async generation. Otherwise delete + # method. 
async def _agenerate( self, messages: List[BaseMessage], diff --git a/libs/cli/langchain_cli/integration_template/integration_template/llms.py b/libs/cli/langchain_cli/integration_template/integration_template/llms.py index bd8c2fc37fadb..562fc0f400197 100644 --- a/libs/cli/langchain_cli/integration_template/integration_template/llms.py +++ b/libs/cli/langchain_cli/integration_template/integration_template/llms.py @@ -1,5 +1,4 @@ -import asyncio -from functools import partial +"""__ModuleName__ large language models.""" from typing import ( Any, AsyncIterator, @@ -25,6 +24,7 @@ class __ModuleName__LLM(BaseLLM): from __module_name__ import __ModuleName__LLM model = __ModuleName__LLM() + model.invoke("Come up with 10 names for a song about parrots") """ @property @@ -41,6 +41,8 @@ def _generate( ) -> LLMResult: raise NotImplementedError + # TODO: Implement if __model_name__ supports async generation. Otherwise + # delete method. async def _agenerate( self, prompts: List[str], @@ -48,11 +50,9 @@ async def _agenerate( run_manager: Optional[AsyncCallbackManagerForLLMRun] = None, **kwargs: Any, ) -> LLMResult: - # Change implementation if integration natively supports async generation. - return await asyncio.get_running_loop().run_in_executor( - None, partial(self._generate, **kwargs), prompts, stop, run_manager - ) + raise NotImplementedError + # TODO: Implement if __model_name__ supports streaming. Otherwise delete method. def _stream( self, prompt: str, @@ -62,6 +62,8 @@ def _stream( ) -> Iterator[GenerationChunk]: raise NotImplementedError + # TODO: Implement if __model_name__ supports async streaming. Otherwise delete + # method. async def _astream( self, prompt: str, @@ -69,5 +71,4 @@ async def _astream( run_manager: Optional[AsyncCallbackManagerForLLMRun] = None, **kwargs: Any, ) -> AsyncIterator[GenerationChunk]: - yield GenerationChunk(text="Yield chunks") - yield GenerationChunk(text=" like this!") + raise NotImplementedError diff --git a/libs/cli/langchain_cli/integration_template/integration_template/vectorstores.py b/libs/cli/langchain_cli/integration_template/integration_template/vectorstores.py index 10c22f22b2a64..3683d53c4a961 100644 --- a/libs/cli/langchain_cli/integration_template/integration_template/vectorstores.py +++ b/libs/cli/langchain_cli/integration_template/integration_template/vectorstores.py @@ -1,3 +1,4 @@ +"""__ModuleName__ vector stores.""" from __future__ import annotations import asyncio @@ -24,7 +25,7 @@ class __ModuleName__VectorStore(VectorStore): - """Interface for vector store. + """__ModuleName__ vector store. Example: .. 
code-block:: python diff --git a/libs/cli/langchain_cli/integration_template/pyproject.toml b/libs/cli/langchain_cli/integration_template/pyproject.toml index be76900001b5d..08a778b2fcdb7 100644 --- a/libs/cli/langchain_cli/integration_template/pyproject.toml +++ b/libs/cli/langchain_cli/integration_template/pyproject.toml @@ -12,25 +12,21 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -langchain-core = ">=0.0.12" +langchain-core = "^0.1" [tool.poetry.group.test] optional = true [tool.poetry.group.test.dependencies] -pytest = "^7.3.0" -freezegun = "^1.2.2" -pytest-mock = "^3.10.0" -syrupy = "^4.0.2" -pytest-watcher = "^0.3.4" -pytest-asyncio = "^0.21.1" +pytest = "^7.4.3" +pytest-asyncio = "^0.23.2" langchain-core = {path = "../../core", develop = true} [tool.poetry.group.codespell] optional = true [tool.poetry.group.codespell.dependencies] -codespell = "^2.2.0" +codespell = "^2.2.6" [tool.poetry.group.test_integration] optional = true @@ -41,10 +37,10 @@ optional = true optional = true [tool.poetry.group.lint.dependencies] -ruff = "^0.1.5" +ruff = "^0.1.8" [tool.poetry.group.typing.dependencies] -mypy = "^0.991" +mypy = "^1.7.1" langchain-core = {path = "../../core", develop = true} [tool.poetry.group.dev] @@ -87,8 +83,6 @@ addopts = "--snapshot-warn-unused --strict-markers --strict-config --durations=5 # Registering custom markers. # https://docs.pytest.org/en/7.1.x/example/markers.html#registering-markers markers = [ - "requires: mark tests as requiring a specific library", - "asyncio: mark tests as requiring asyncio", "compile: mark placeholder test used to compile integration tests without running them", ] asyncio_mode = "auto" diff --git a/libs/cli/langchain_cli/integration_template/scripts/lint_imports.sh b/libs/cli/langchain_cli/integration_template/scripts/lint_imports.sh index 695613c7ba8fd..19ccec1480c01 100755 --- a/libs/cli/langchain_cli/integration_template/scripts/lint_imports.sh +++ b/libs/cli/langchain_cli/integration_template/scripts/lint_imports.sh @@ -5,9 +5,10 @@ set -eu # Initialize a variable to keep track of errors errors=0 -# make sure not importing from langchain or langchain_experimental +# make sure not importing from langchain, langchain_experimental, or langchain_community git --no-pager grep '^from langchain\.' . && errors=$((errors+1)) git --no-pager grep '^from langchain_experimental\.' . && errors=$((errors+1)) +git --no-pager grep '^from langchain_community\.' . 
&& errors=$((errors+1)) # Decide on an exit status based on the errors if [ "$errors" -gt 0 ]; then diff --git a/libs/community/langchain_community/agent_toolkits/__init__.py b/libs/community/langchain_community/agent_toolkits/__init__.py index 3f6bf3033190d..bbb3820cb3fac 100644 --- a/libs/community/langchain_community/agent_toolkits/__init__.py +++ b/libs/community/langchain_community/agent_toolkits/__init__.py @@ -18,6 +18,7 @@ from langchain_community.agent_toolkits.azure_cognitive_services import ( AzureCognitiveServicesToolkit, ) +from langchain_community.agent_toolkits.cogniswitch.toolkit import CogniswitchToolkit from langchain_community.agent_toolkits.connery import ConneryToolkit from langchain_community.agent_toolkits.file_management.toolkit import ( FileManagementToolkit, @@ -51,6 +52,7 @@ "AINetworkToolkit", "AmadeusToolkit", "AzureCognitiveServicesToolkit", + "CogniswitchToolkit", "ConneryToolkit", "FileManagementToolkit", "GmailToolkit", diff --git a/libs/community/langchain_community/agent_toolkits/cogniswitch/__init__.py b/libs/community/langchain_community/agent_toolkits/cogniswitch/__init__.py new file mode 100644 index 0000000000000..df1d84976c49a --- /dev/null +++ b/libs/community/langchain_community/agent_toolkits/cogniswitch/__init__.py @@ -0,0 +1 @@ +"""CogniSwitch Toolkit""" diff --git a/libs/community/langchain_community/agent_toolkits/cogniswitch/toolkit.py b/libs/community/langchain_community/agent_toolkits/cogniswitch/toolkit.py new file mode 100644 index 0000000000000..36ec5ae0f3609 --- /dev/null +++ b/libs/community/langchain_community/agent_toolkits/cogniswitch/toolkit.py @@ -0,0 +1,40 @@ +from typing import List + +from langchain_community.agent_toolkits.base import BaseToolkit +from langchain_community.tools import BaseTool +from langchain_community.tools.cogniswitch.tool import ( + CogniswitchKnowledgeRequest, + CogniswitchKnowledgeSourceFile, + CogniswitchKnowledgeSourceURL, + CogniswitchKnowledgeStatus, +) + + +class CogniswitchToolkit(BaseToolkit): + """ + Toolkit for CogniSwitch. 
+ + Use the toolkit to get all the tools present in the cogniswitch and + use them to interact with your knowledge + """ + + cs_token: str # cogniswitch token + OAI_token: str # OpenAI API token + apiKey: str # Cogniswitch OAuth token + + def get_tools(self) -> List[BaseTool]: + """Get the tools in the toolkit.""" + return [ + CogniswitchKnowledgeStatus( + cs_token=self.cs_token, OAI_token=self.OAI_token, apiKey=self.apiKey + ), + CogniswitchKnowledgeRequest( + cs_token=self.cs_token, OAI_token=self.OAI_token, apiKey=self.apiKey + ), + CogniswitchKnowledgeSourceFile( + cs_token=self.cs_token, OAI_token=self.OAI_token, apiKey=self.apiKey + ), + CogniswitchKnowledgeSourceURL( + cs_token=self.cs_token, OAI_token=self.OAI_token, apiKey=self.apiKey + ), + ] diff --git a/libs/community/langchain_community/cache.py b/libs/community/langchain_community/cache.py index 54996071f8655..5f36aa5856b44 100644 --- a/libs/community/langchain_community/cache.py +++ b/libs/community/langchain_community/cache.py @@ -29,21 +29,24 @@ import warnings from abc import ABC from datetime import timedelta -from functools import lru_cache +from functools import lru_cache, wraps from typing import ( TYPE_CHECKING, Any, + Awaitable, Callable, Dict, + Generator, List, Optional, + Sequence, Tuple, Type, Union, cast, ) -from sqlalchemy import Column, Integer, String, create_engine, select +from sqlalchemy import Column, Integer, String, create_engine, delete, select from sqlalchemy.engine import Row from sqlalchemy.engine.base import Engine from sqlalchemy.orm import Session @@ -55,20 +58,23 @@ from langchain_core.caches import RETURN_VAL_TYPE, BaseCache from langchain_core.embeddings import Embeddings -from langchain_core.language_models.llms import LLM, get_prompts +from langchain_core.language_models.llms import LLM, aget_prompts, get_prompts from langchain_core.load.dump import dumps from langchain_core.load.load import loads from langchain_core.outputs import ChatGeneration, Generation from langchain_core.utils import get_from_env -from langchain_community.utilities.astradb import AstraDBEnvironment +from langchain_community.utilities.astradb import ( + SetupMode, + _AstraDBCollectionEnvironment, +) from langchain_community.vectorstores.redis import Redis as RedisVectorstore logger = logging.getLogger(__file__) if TYPE_CHECKING: import momento - from astrapy.db import AstraDB + from astrapy.db import AstraDB, AsyncAstraDB from cassandra.cluster import Session as CassandraSession @@ -192,6 +198,20 @@ def clear(self, **kwargs: Any) -> None: """Clear cache.""" self._cache = {} + async def alookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: + """Look up based on prompt and llm_string.""" + return self.lookup(prompt, llm_string) + + async def aupdate( + self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE + ) -> None: + """Update cache based on prompt and llm_string.""" + self.update(prompt, llm_string, return_val) + + async def aclear(self, **kwargs: Any) -> None: + """Clear cache.""" + self.clear() + Base = declarative_base() @@ -1294,37 +1314,33 @@ def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> None: """Update based on prompt and llm_string.""" - self._delete_previous(prompt, llm_string) - prompt_md5 = self.get_md5(prompt) - items = [ - self.cache_schema( - id=str(uuid.uuid1()), - prompt=prompt, - prompt_md5=prompt_md5, - llm=llm_string, - response=dumps(gen), - idx=i, - ) - for 
i, gen in enumerate(return_val) - ] with Session(self.engine) as session, session.begin(): + self._delete_previous(session, prompt, llm_string) + prompt_md5 = self.get_md5(prompt) + items = [ + self.cache_schema( + id=str(uuid.uuid1()), + prompt=prompt, + prompt_md5=prompt_md5, + llm=llm_string, + response=dumps(gen), + idx=i, + ) + for i, gen in enumerate(return_val) + ] for item in items: session.merge(item) - def _delete_previous(self, prompt: str, llm_string: str) -> None: + def _delete_previous(self, session: Session, prompt: str, llm_string: str) -> None: stmt = ( - select(self.cache_schema.response) + delete(self.cache_schema) .where(self.cache_schema.prompt_md5 == self.get_md5(prompt)) # type: ignore .where(self.cache_schema.llm == llm_string) .where(self.cache_schema.prompt == prompt) - .order_by(self.cache_schema.idx) ) - with Session(self.engine) as session, session.begin(): - rows = session.execute(stmt).fetchall() - for item in rows: - session.delete(item) + session.execute(stmt) - def _search_rows(self, prompt: str, llm_string: str) -> List[Row]: + def _search_rows(self, prompt: str, llm_string: str) -> Sequence[Row]: prompt_pd5 = self.get_md5(prompt) stmt = ( select(self.cache_schema.response) @@ -1360,6 +1376,10 @@ class AstraDBCache(BaseCache): (needed to prevent same-prompt-different-model collisions) """ + @staticmethod + def _make_id(prompt: str, llm_string: str) -> str: + return f"{_hash(prompt)}#{_hash(llm_string)}" + def __init__( self, *, @@ -1367,7 +1387,10 @@ def __init__( token: Optional[str] = None, api_endpoint: Optional[str] = None, astra_db_client: Optional[AstraDB] = None, + async_astra_db_client: Optional[AsyncAstraDB] = None, namespace: Optional[str] = None, + pre_delete_collection: bool = False, + setup_mode: SetupMode = SetupMode.SYNC, ): """ Create an AstraDB cache using a collection for storage. @@ -1377,29 +1400,35 @@ def __init__( token (Optional[str]): API token for Astra DB usage. api_endpoint (Optional[str]): full URL to the API endpoint, such as "https://-us-east1.apps.astra.datastax.com". - astra_db_client (Optional[Any]): *alternative to token+api_endpoint*, + astra_db_client (Optional[AstraDB]): + *alternative to token+api_endpoint*, you can pass an already-created 'astrapy.db.AstraDB' instance. + async_astra_db_client (Optional[AsyncAstraDB]): + *alternative to token+api_endpoint*, + you can pass an already-created 'astrapy.db.AsyncAstraDB' instance. namespace (Optional[str]): namespace (aka keyspace) where the collection is created. Defaults to the database's "default namespace". + pre_delete_collection (bool): whether to delete and re-create the + collection. Defaults to False. + async_setup (bool): whether to create the collection asynchronously. + Enable only if there is a running asyncio event loop. Defaults to False. 
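A minimal wiring sketch for the cache described above, assuming the Astra DB credentials are exported as environment variables and using the set_llm_cache helper from langchain.globals; the collection name is arbitrary.

import os

from langchain.globals import set_llm_cache
from langchain_community.cache import AstraDBCache
from langchain_community.utilities.astradb import SetupMode

cache = AstraDBCache(
    collection_name="langchain_llm_cache",  # any collection name works
    token=os.environ["ASTRA_DB_APPLICATION_TOKEN"],
    api_endpoint=os.environ["ASTRA_DB_API_ENDPOINT"],
    setup_mode=SetupMode.SYNC,  # use SetupMode.ASYNC only inside a running event loop
)
set_llm_cache(cache)  # subsequent LLM generations are looked up in / stored to Astra DB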
""" - astra_env = AstraDBEnvironment( + self.astra_env = _AstraDBCollectionEnvironment( + collection_name=collection_name, token=token, api_endpoint=api_endpoint, astra_db_client=astra_db_client, + async_astra_db_client=async_astra_db_client, namespace=namespace, + setup_mode=setup_mode, + pre_delete_collection=pre_delete_collection, ) - self.astra_db = astra_env.astra_db - self.collection = self.astra_db.create_collection( - collection_name=collection_name, - ) - self.collection_name = collection_name - - @staticmethod - def _make_id(prompt: str, llm_string: str) -> str: - return f"{_hash(prompt)}#{_hash(llm_string)}" + self.collection = self.astra_env.collection + self.async_collection = self.astra_env.async_collection def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: """Look up based on prompt and llm_string.""" + self.astra_env.ensure_db_setup() doc_id = self._make_id(prompt, llm_string) item = self.collection.find_one( filter={ @@ -1409,18 +1438,27 @@ def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: "body_blob": 1, }, )["data"]["document"] - if item is not None: - generations = _loads_generations(item["body_blob"]) - # this protects against malformed cached items: - if generations is not None: - return generations - else: - return None - else: - return None + return _loads_generations(item["body_blob"]) if item is not None else None + + async def alookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: + """Look up based on prompt and llm_string.""" + await self.astra_env.aensure_db_setup() + doc_id = self._make_id(prompt, llm_string) + item = ( + await self.async_collection.find_one( + filter={ + "_id": doc_id, + }, + projection={ + "body_blob": 1, + }, + ) + )["data"]["document"] + return _loads_generations(item["body_blob"]) if item is not None else None def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> None: """Update cache based on prompt and llm_string.""" + self.astra_env.ensure_db_setup() doc_id = self._make_id(prompt, llm_string) blob = _dumps_generations(return_val) self.collection.upsert( @@ -1430,6 +1468,20 @@ def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> N }, ) + async def aupdate( + self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE + ) -> None: + """Update cache based on prompt and llm_string.""" + await self.astra_env.aensure_db_setup() + doc_id = self._make_id(prompt, llm_string) + blob = _dumps_generations(return_val) + await self.async_collection.upsert( + { + "_id": doc_id, + "body_blob": blob, + }, + ) + def delete_through_llm( self, prompt: str, llm: LLM, stop: Optional[List[str]] = None ) -> None: @@ -1443,14 +1495,42 @@ def delete_through_llm( )[1] return self.delete(prompt, llm_string=llm_string) + async def adelete_through_llm( + self, prompt: str, llm: LLM, stop: Optional[List[str]] = None + ) -> None: + """ + A wrapper around `adelete` with the LLM being passed. 
+ In case the llm(prompt) calls have a `stop` param, you should pass it here + """ + llm_string = ( + await aget_prompts( + {**llm.dict(), **{"stop": stop}}, + [], + ) + )[1] + return await self.adelete(prompt, llm_string=llm_string) + def delete(self, prompt: str, llm_string: str) -> None: """Evict from cache if there's an entry.""" + self.astra_env.ensure_db_setup() doc_id = self._make_id(prompt, llm_string) self.collection.delete_one(doc_id) + async def adelete(self, prompt: str, llm_string: str) -> None: + """Evict from cache if there's an entry.""" + await self.astra_env.aensure_db_setup() + doc_id = self._make_id(prompt, llm_string) + await self.async_collection.delete_one(doc_id) + def clear(self, **kwargs: Any) -> None: """Clear cache. This is for all LLMs at once.""" - self.astra_db.truncate_collection(self.collection_name) + self.astra_env.ensure_db_setup() + self.collection.clear() + + async def aclear(self, **kwargs: Any) -> None: + """Clear cache. This is for all LLMs at once.""" + await self.astra_env.aensure_db_setup() + await self.async_collection.clear() ASTRA_DB_SEMANTIC_CACHE_DEFAULT_THRESHOLD = 0.85 @@ -1458,6 +1538,42 @@ def clear(self, **kwargs: Any) -> None: ASTRA_DB_SEMANTIC_CACHE_EMBEDDING_CACHE_SIZE = 16 +_unset = ["unset"] + + +class _CachedAwaitable: + """Caches the result of an awaitable so it can be awaited multiple times""" + + def __init__(self, awaitable: Awaitable[Any]): + self.awaitable = awaitable + self.result = _unset + + def __await__(self) -> Generator: + if self.result is _unset: + self.result = yield from self.awaitable.__await__() + return self.result + + +def _reawaitable(func: Callable) -> Callable: + """Makes an async function result awaitable multiple times""" + + @wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> _CachedAwaitable: + return _CachedAwaitable(func(*args, **kwargs)) + + return wrapper + + +def _async_lru_cache(maxsize: int = 128, typed: bool = False) -> Callable: + """Least-recently-used async cache decorator. + Equivalent to functools.lru_cache for async functions""" + + def decorating_function(user_function: Callable) -> Callable: + return lru_cache(maxsize, typed)(_reawaitable(user_function)) + + return decorating_function + + class AstraDBSemanticCache(BaseCache): """ Cache that uses Astra DB as a vector-store backend for semantic @@ -1468,7 +1584,7 @@ class AstraDBSemanticCache(BaseCache): in the document metadata. You can choose the preferred similarity (or use the API default) -- - remember the threshold might require metric-dependend tuning. + remember the threshold might require metric-dependent tuning. """ def __init__( @@ -1478,7 +1594,10 @@ def __init__( token: Optional[str] = None, api_endpoint: Optional[str] = None, astra_db_client: Optional[AstraDB] = None, + async_astra_db_client: Optional[AsyncAstraDB] = None, namespace: Optional[str] = None, + setup_mode: SetupMode = SetupMode.SYNC, + pre_delete_collection: bool = False, embedding: Embeddings, metric: Optional[str] = None, similarity_threshold: float = ASTRA_DB_SEMANTIC_CACHE_DEFAULT_THRESHOLD, @@ -1491,10 +1610,17 @@ def __init__( token (Optional[str]): API token for Astra DB usage. api_endpoint (Optional[str]): full URL to the API endpoint, such as "https://-us-east1.apps.astra.datastax.com". - astra_db_client (Optional[Any]): *alternative to token+api_endpoint*, + astra_db_client (Optional[AstraDB]): *alternative to token+api_endpoint*, you can pass an already-created 'astrapy.db.AstraDB' instance. 
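The _CachedAwaitable / _async_lru_cache helpers above exist because a plain coroutine can be awaited only once, so functools.lru_cache alone cannot memoize an async embedding call. A self-contained sketch of the same idea, with an illustrative embed function standing in for a real embedding request:

import asyncio
from functools import lru_cache, wraps
from typing import Any, Awaitable, Callable


class CachedAwaitable:
    """Wraps an awaitable so its result can be awaited any number of times."""

    def __init__(self, awaitable: Awaitable[Any]) -> None:
        self.awaitable = awaitable
        self.done = False
        self.result: Any = None

    def __await__(self):
        if not self.done:
            self.result = yield from self.awaitable.__await__()
            self.done = True
        return self.result


def async_lru_cache(maxsize: int = 128) -> Callable:
    """lru_cache for async functions: same arguments return the same cached awaitable."""

    def decorating(fn: Callable) -> Callable:
        @lru_cache(maxsize)
        @wraps(fn)
        def wrapper(*args: Any, **kwargs: Any) -> CachedAwaitable:
            return CachedAwaitable(fn(*args, **kwargs))

        return wrapper

    return decorating


@async_lru_cache(maxsize=4)
async def embed(text: str) -> int:
    await asyncio.sleep(0)  # stand-in for a real embedding call
    return len(text)


async def main() -> None:
    first = await embed("hello world")
    second = await embed("hello world")  # cache hit: the coroutine is not re-run
    print(first, second)  # 11 11


asyncio.run(main())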
+ async_astra_db_client (Optional[AsyncAstraDB]): + *alternative to token+api_endpoint*, + you can pass an already-created 'astrapy.db.AsyncAstraDB' instance. namespace (Optional[str]): namespace (aka keyspace) where the collection is created. Defaults to the database's "default namespace". + setup_mode (SetupMode): mode used to create the collection in the DB + (SYNC, ASYNC or OFF). Defaults to SYNC. + pre_delete_collection (bool): whether to delete and re-create the + collection. Defaults to False. embedding (Embedding): Embedding provider for semantic encoding and search. metric: the function to use for evaluating similarity of text embeddings. @@ -1505,17 +1631,10 @@ def __init__( The default score threshold is tuned to the default metric. Tune it carefully yourself if switching to another distance metric. """ - astra_env = AstraDBEnvironment( - token=token, - api_endpoint=api_endpoint, - astra_db_client=astra_db_client, - namespace=namespace, - ) - self.astra_db = astra_env.astra_db - self.embedding = embedding self.metric = metric self.similarity_threshold = similarity_threshold + self.collection_name = collection_name # The contract for this class has separate lookup and update: # in order to spare some embedding calculations we cache them between @@ -1527,25 +1646,47 @@ def _cache_embedding(text: str) -> List[float]: return self.embedding.embed_query(text=text) self._get_embedding = _cache_embedding - self.embedding_dimension = self._get_embedding_dimension() - self.collection_name = collection_name + @_async_lru_cache(maxsize=ASTRA_DB_SEMANTIC_CACHE_EMBEDDING_CACHE_SIZE) + async def _acache_embedding(text: str) -> List[float]: + return await self.embedding.aembed_query(text=text) + + self._aget_embedding = _acache_embedding + + embedding_dimension: Union[int, Awaitable[int], None] = None + if setup_mode == SetupMode.ASYNC: + embedding_dimension = self._aget_embedding_dimension() + elif setup_mode == SetupMode.SYNC: + embedding_dimension = self._get_embedding_dimension() - self.collection = self.astra_db.create_collection( - collection_name=self.collection_name, - dimension=self.embedding_dimension, - metric=self.metric, + self.astra_env = _AstraDBCollectionEnvironment( + collection_name=collection_name, + token=token, + api_endpoint=api_endpoint, + astra_db_client=astra_db_client, + async_astra_db_client=async_astra_db_client, + namespace=namespace, + setup_mode=setup_mode, + pre_delete_collection=pre_delete_collection, + embedding_dimension=embedding_dimension, + metric=metric, ) + self.collection = self.astra_env.collection + self.async_collection = self.astra_env.async_collection def _get_embedding_dimension(self) -> int: return len(self._get_embedding(text="This is a sample sentence.")) + async def _aget_embedding_dimension(self) -> int: + return len(await self._aget_embedding(text="This is a sample sentence.")) + @staticmethod def _make_id(prompt: str, llm_string: str) -> str: return f"{_hash(prompt)}#{_hash(llm_string)}" def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> None: """Update cache based on prompt and llm_string.""" + self.astra_env.ensure_db_setup() doc_id = self._make_id(prompt, llm_string) llm_string_hash = _hash(llm_string) embedding_vector = self._get_embedding(text=prompt) @@ -1560,6 +1701,25 @@ def update(self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE) -> N } ) + async def aupdate( + self, prompt: str, llm_string: str, return_val: RETURN_VAL_TYPE + ) -> None: + """Update cache based on prompt and llm_string.""" + 
await self.astra_env.aensure_db_setup() + doc_id = self._make_id(prompt, llm_string) + llm_string_hash = _hash(llm_string) + embedding_vector = await self._aget_embedding(text=prompt) + body = _dumps_generations(return_val) + # + await self.async_collection.upsert( + { + "_id": doc_id, + "body_blob": body, + "llm_string_hash": llm_string_hash, + "$vector": embedding_vector, + } + ) + def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: """Look up based on prompt and llm_string.""" hit_with_id = self.lookup_with_id(prompt, llm_string) @@ -1568,6 +1728,14 @@ def lookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: else: return None + async def alookup(self, prompt: str, llm_string: str) -> Optional[RETURN_VAL_TYPE]: + """Look up based on prompt and llm_string.""" + hit_with_id = await self.alookup_with_id(prompt, llm_string) + if hit_with_id is not None: + return hit_with_id[1] + else: + return None + def lookup_with_id( self, prompt: str, llm_string: str ) -> Optional[Tuple[str, RETURN_VAL_TYPE]]: @@ -1575,6 +1743,7 @@ def lookup_with_id( Look up based on prompt and llm_string. If there are hits, return (document_id, cached_entry) for the top hit """ + self.astra_env.ensure_db_setup() prompt_embedding: List[float] = self._get_embedding(text=prompt) llm_string_hash = _hash(llm_string) @@ -1593,7 +1762,37 @@ def lookup_with_id( generations = _loads_generations(hit["body_blob"]) if generations is not None: # this protects against malformed cached items: - return (hit["_id"], generations) + return hit["_id"], generations + else: + return None + + async def alookup_with_id( + self, prompt: str, llm_string: str + ) -> Optional[Tuple[str, RETURN_VAL_TYPE]]: + """ + Look up based on prompt and llm_string. + If there are hits, return (document_id, cached_entry) for the top hit + """ + await self.astra_env.aensure_db_setup() + prompt_embedding: List[float] = await self._aget_embedding(text=prompt) + llm_string_hash = _hash(llm_string) + + hit = await self.async_collection.vector_find_one( + vector=prompt_embedding, + filter={ + "llm_string_hash": llm_string_hash, + }, + fields=["body_blob", "_id"], + include_similarity=True, + ) + + if hit is None or hit["$similarity"] < self.similarity_threshold: + return None + else: + generations = _loads_generations(hit["body_blob"]) + if generations is not None: + # this protects against malformed cached items: + return hit["_id"], generations else: return None @@ -1606,14 +1805,41 @@ def lookup_with_id_through_llm( )[1] return self.lookup_with_id(prompt, llm_string=llm_string) + async def alookup_with_id_through_llm( + self, prompt: str, llm: LLM, stop: Optional[List[str]] = None + ) -> Optional[Tuple[str, RETURN_VAL_TYPE]]: + llm_string = ( + await aget_prompts( + {**llm.dict(), **{"stop": stop}}, + [], + ) + )[1] + return await self.alookup_with_id(prompt, llm_string=llm_string) + def delete_by_document_id(self, document_id: str) -> None: """ Given this is a "similarity search" cache, an invalidation pattern that makes sense is first a lookup to get an ID, and then deleting with that ID. This is for the second step. """ + self.astra_env.ensure_db_setup() self.collection.delete_one(document_id) + async def adelete_by_document_id(self, document_id: str) -> None: + """ + Given this is a "similarity search" cache, an invalidation pattern + that makes sense is first a lookup to get an ID, and then deleting + with that ID. This is for the second step. 
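Putting the two steps together, the invalidation pattern looks like the following sketch; the credentials come from environment variables, FakeEmbeddings is only a stand-in embedding model, and llm_string is a placeholder for the serialized LLM parameters normally obtained via get_prompts / aget_prompts.

import os

from langchain_community.cache import AstraDBSemanticCache
from langchain_community.embeddings import FakeEmbeddings

semantic_cache = AstraDBSemanticCache(
    collection_name="langchain_semantic_cache",
    token=os.environ["ASTRA_DB_APPLICATION_TOKEN"],
    api_endpoint=os.environ["ASTRA_DB_API_ENDPOINT"],
    embedding=FakeEmbeddings(size=16),
)

llm_string = "<serialized-llm-parameters>"  # placeholder
hit = semantic_cache.lookup_with_id("What is LangChain?", llm_string=llm_string)
if hit is not None:
    doc_id, _generations = hit
    semantic_cache.delete_by_document_id(doc_id)  # invalidate only that entry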
+ """ + await self.astra_env.aensure_db_setup() + await self.async_collection.delete_one(document_id) + def clear(self, **kwargs: Any) -> None: """Clear the *whole* semantic cache.""" - self.astra_db.truncate_collection(self.collection_name) + self.astra_env.ensure_db_setup() + self.collection.clear() + + async def aclear(self, **kwargs: Any) -> None: + """Clear the *whole* semantic cache.""" + await self.astra_env.aensure_db_setup() + await self.async_collection.clear() diff --git a/libs/community/langchain_community/callbacks/openai_info.py b/libs/community/langchain_community/callbacks/openai_info.py index c6f8262285b99..6fe210c9f5559 100644 --- a/libs/community/langchain_community/callbacks/openai_info.py +++ b/libs/community/langchain_community/callbacks/openai_info.py @@ -15,6 +15,8 @@ "gpt-4-32k-0613": 0.06, "gpt-4-vision-preview": 0.01, "gpt-4-1106-preview": 0.01, + "gpt-4-0125-preview": 0.01, + "gpt-4-turbo-preview": 0.01, # GPT-4 output "gpt-4-completion": 0.06, "gpt-4-0314-completion": 0.06, @@ -24,6 +26,8 @@ "gpt-4-32k-0613-completion": 0.12, "gpt-4-vision-preview-completion": 0.03, "gpt-4-1106-preview-completion": 0.03, + "gpt-4-0125-preview-completion": 0.03, + "gpt-4-turbo-preview-completion": 0.03, # GPT-3.5 input # gpt-3.5-turbo points at gpt-3.5-turbo-0613 until Feb 16, 2024. # Switches to gpt-3.5-turbo-0125 after. diff --git a/libs/community/langchain_community/chat_message_histories/astradb.py b/libs/community/langchain_community/chat_message_histories/astradb.py index 7257476101a38..5b118a0ac9f3c 100644 --- a/libs/community/langchain_community/chat_message_histories/astradb.py +++ b/libs/community/langchain_community/chat_message_histories/astradb.py @@ -3,9 +3,12 @@ import json import time -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, List, Optional, Sequence -from langchain_community.utilities.astradb import AstraDBEnvironment +from langchain_community.utilities.astradb import ( + SetupMode, + _AstraDBCollectionEnvironment, +) if TYPE_CHECKING: from astrapy.db import AstraDB @@ -23,16 +26,16 @@ class AstraDBChatMessageHistory(BaseChatMessageHistory): """Chat message history that stores history in Astra DB. - Args (only keyword-arguments accepted): + Args: session_id: arbitrary key that is used to store the messages of a single chat session. - collection_name (str): name of the Astra DB collection to create/use. - token (Optional[str]): API token for Astra DB usage. - api_endpoint (Optional[str]): full URL to the API endpoint, + collection_name: name of the Astra DB collection to create/use. + token: API token for Astra DB usage. + api_endpoint: full URL to the API endpoint, such as "https://-us-east1.apps.astra.datastax.com". - astra_db_client (Optional[Any]): *alternative to token+api_endpoint*, + astra_db_client: *alternative to token+api_endpoint*, you can pass an already-created 'astrapy.db.AstraDB' instance. - namespace (Optional[str]): namespace (aka keyspace) where the + namespace: namespace (aka keyspace) where the collection is created. Defaults to the database's "default namespace". 
""" @@ -45,24 +48,29 @@ def __init__( api_endpoint: Optional[str] = None, astra_db_client: Optional[AstraDB] = None, namespace: Optional[str] = None, + setup_mode: SetupMode = SetupMode.SYNC, + pre_delete_collection: bool = False, ) -> None: - """Create an Astra DB chat message history.""" - astra_env = AstraDBEnvironment( + self.astra_env = _AstraDBCollectionEnvironment( + collection_name=collection_name, token=token, api_endpoint=api_endpoint, astra_db_client=astra_db_client, namespace=namespace, + setup_mode=setup_mode, + pre_delete_collection=pre_delete_collection, ) - self.astra_db = astra_env.astra_db - self.collection = self.astra_db.create_collection(collection_name) + self.collection = self.astra_env.collection + self.async_collection = self.astra_env.async_collection self.session_id = session_id self.collection_name = collection_name @property - def messages(self) -> List[BaseMessage]: # type: ignore + def messages(self) -> List[BaseMessage]: """Retrieve all session messages from DB""" + self.astra_env.ensure_db_setup() message_blobs = [ doc["body_blob"] for doc in sorted( @@ -82,16 +90,58 @@ def messages(self) -> List[BaseMessage]: # type: ignore messages = messages_from_dict(items) return messages - def add_message(self, message: BaseMessage) -> None: - """Write a message to the table""" - self.collection.insert_one( + @messages.setter + def messages(self, messages: List[BaseMessage]) -> None: + raise NotImplementedError("Use add_messages instead") + + async def aget_messages(self) -> List[BaseMessage]: + await self.astra_env.aensure_db_setup() + docs = self.async_collection.paginated_find( + filter={ + "session_id": self.session_id, + }, + projection={ + "timestamp": 1, + "body_blob": 1, + }, + ) + sorted_docs = sorted( + [doc async for doc in docs], + key=lambda _doc: _doc["timestamp"], + ) + message_blobs = [doc["body_blob"] for doc in sorted_docs] + items = [json.loads(message_blob) for message_blob in message_blobs] + messages = messages_from_dict(items) + return messages + + def add_messages(self, messages: Sequence[BaseMessage]) -> None: + self.astra_env.ensure_db_setup() + docs = [ { "timestamp": time.time(), "session_id": self.session_id, "body_blob": json.dumps(message_to_dict(message)), } - ) + for message in messages + ] + self.collection.chunked_insert_many(docs) + + async def aadd_messages(self, messages: Sequence[BaseMessage]) -> None: + await self.astra_env.aensure_db_setup() + docs = [ + { + "timestamp": time.time(), + "session_id": self.session_id, + "body_blob": json.dumps(message_to_dict(message)), + } + for message in messages + ] + await self.async_collection.chunked_insert_many(docs) def clear(self) -> None: - """Clear session memory from DB""" + self.astra_env.ensure_db_setup() self.collection.delete_many(filter={"session_id": self.session_id}) + + async def aclear(self) -> None: + await self.astra_env.aensure_db_setup() + await self.async_collection.delete_many(filter={"session_id": self.session_id}) diff --git a/libs/community/langchain_community/chat_models/__init__.py b/libs/community/langchain_community/chat_models/__init__.py index 92402bbc07f9e..db2e4e6b99696 100644 --- a/libs/community/langchain_community/chat_models/__init__.py +++ b/libs/community/langchain_community/chat_models/__init__.py @@ -55,6 +55,7 @@ from langchain_community.chat_models.vertexai import ChatVertexAI from langchain_community.chat_models.volcengine_maas import VolcEngineMaasChat from langchain_community.chat_models.yandex import ChatYandexGPT +from 
langchain_community.chat_models.yuan2 import ChatYuan2 from langchain_community.chat_models.zhipuai import ChatZhipuAI __all__ = [ @@ -95,6 +96,7 @@ "ChatSparkLLM", "VolcEngineMaasChat", "GPTRouter", + "ChatYuan2", "ChatZhipuAI", "ChatPerplexity", ] diff --git a/libs/community/langchain_community/chat_models/huggingface.py b/libs/community/langchain_community/chat_models/huggingface.py index 0af34a8cf04b2..143aff07172d1 100644 --- a/libs/community/langchain_community/chat_models/huggingface.py +++ b/libs/community/langchain_community/chat_models/huggingface.py @@ -1,4 +1,5 @@ """Hugging Face Chat Wrapper.""" + from typing import Any, List, Optional, Union from langchain_core.callbacks.manager import ( @@ -52,6 +53,7 @@ def __init__(self, **kwargs: Any): from transformers import AutoTokenizer self._resolve_model_id() + self.tokenizer = ( AutoTokenizer.from_pretrained(self.model_id) if self.tokenizer is None @@ -90,10 +92,10 @@ def _to_chat_prompt( ) -> str: """Convert a list of messages into a prompt format expected by wrapped LLM.""" if not messages: - raise ValueError("at least one HumanMessage must be provided") + raise ValueError("At least one HumanMessage must be provided!") if not isinstance(messages[-1], HumanMessage): - raise ValueError("last message must be a HumanMessage") + raise ValueError("Last message must be a HumanMessage!") messages_dicts = [self._to_chatml_format(m) for m in messages] @@ -135,20 +137,15 @@ def _resolve_model_id(self) -> None: from huggingface_hub import list_inference_endpoints available_endpoints = list_inference_endpoints("*") - - if isinstance(self.llm, HuggingFaceTextGenInference): - endpoint_url = self.llm.inference_server_url - - elif isinstance(self.llm, HuggingFaceEndpoint): - endpoint_url = self.llm.endpoint_url - - elif isinstance(self.llm, HuggingFaceHub): - # no need to look up model_id for HuggingFaceHub LLM + if isinstance(self.llm, HuggingFaceHub) or ( + hasattr(self.llm, "repo_id") and self.llm.repo_id + ): self.model_id = self.llm.repo_id return - + elif isinstance(self.llm, HuggingFaceTextGenInference): + endpoint_url: Optional[str] = self.llm.inference_server_url else: - raise ValueError(f"Unknown LLM type: {type(self.llm)}") + endpoint_url = self.llm.endpoint_url for endpoint in available_endpoints: if endpoint.url == endpoint_url: @@ -156,8 +153,8 @@ def _resolve_model_id(self) -> None: if not self.model_id: raise ValueError( - "Failed to resolve model_id" - f"Could not find model id for inference server provided: {endpoint_url}" + "Failed to resolve model_id:" + f"Could not find model id for inference server: {endpoint_url}" "Make sure that your Hugging Face token has access to the endpoint." 
) diff --git a/libs/community/langchain_community/chat_models/ollama.py b/libs/community/langchain_community/chat_models/ollama.py index 92b5afb52ba3f..73a194a90f9b7 100644 --- a/libs/community/langchain_community/chat_models/ollama.py +++ b/libs/community/langchain_community/chat_models/ollama.py @@ -313,12 +313,12 @@ def _stream( for stream_resp in self._create_chat_stream(messages, stop, **kwargs): if stream_resp: chunk = _chat_stream_response_to_chat_generation_chunk(stream_resp) - yield chunk if run_manager: run_manager.on_llm_new_token( chunk.text, verbose=self.verbose, ) + yield chunk except OllamaEndpointNotFoundError: yield from self._legacy_stream(messages, stop, **kwargs) @@ -332,12 +332,12 @@ async def _astream( async for stream_resp in self._acreate_chat_stream(messages, stop, **kwargs): if stream_resp: chunk = _chat_stream_response_to_chat_generation_chunk(stream_resp) - yield chunk if run_manager: await run_manager.on_llm_new_token( chunk.text, verbose=self.verbose, ) + yield chunk @deprecated("0.0.3", alternative="_stream") def _legacy_stream( @@ -351,9 +351,9 @@ def _legacy_stream( for stream_resp in self._create_generate_stream(prompt, stop, **kwargs): if stream_resp: chunk = _stream_response_to_chat_generation_chunk(stream_resp) - yield chunk if run_manager: run_manager.on_llm_new_token( chunk.text, verbose=self.verbose, ) + yield chunk diff --git a/libs/community/langchain_community/chat_models/yuan2.py b/libs/community/langchain_community/chat_models/yuan2.py new file mode 100644 index 0000000000000..3d622206c3540 --- /dev/null +++ b/libs/community/langchain_community/chat_models/yuan2.py @@ -0,0 +1,486 @@ +"""ChatYuan2 wrapper.""" +from __future__ import annotations + +import logging +from typing import ( + TYPE_CHECKING, + Any, + AsyncIterator, + Callable, + Dict, + Iterator, + List, + Mapping, + Optional, + Tuple, + Type, + Union, +) + +from langchain_core.callbacks import ( + AsyncCallbackManagerForLLMRun, + CallbackManagerForLLMRun, +) +from langchain_core.language_models.chat_models import ( + BaseChatModel, + agenerate_from_stream, + generate_from_stream, +) +from langchain_core.messages import ( + AIMessage, + AIMessageChunk, + BaseMessage, + BaseMessageChunk, + ChatMessage, + ChatMessageChunk, + FunctionMessage, + HumanMessage, + HumanMessageChunk, + SystemMessage, + SystemMessageChunk, +) +from langchain_core.outputs import ChatGeneration, ChatGenerationChunk, ChatResult +from langchain_core.pydantic_v1 import Field, root_validator +from langchain_core.utils import ( + get_from_dict_or_env, + get_pydantic_field_names, +) +from tenacity import ( + before_sleep_log, + retry, + retry_if_exception_type, + stop_after_attempt, + wait_exponential, +) + +if TYPE_CHECKING: + from openai.types.chat import ChatCompletion, ChatCompletionMessage + +logger = logging.getLogger(__name__) + + +class ChatYuan2(BaseChatModel): + """`Yuan2.0` Chat models API. + + To use, you should have the ``openai-python`` package installed, if package + not installed, using ```pip install openai``` to install it. The + environment variable ``YUAN2_API_KEY`` set to your API key, if not set, + everyone can access apis. + + Any parameters that are valid to be passed to the openai.create call can be passed + in, even if not explicitly saved on this class. + + Example: + .. 
code-block:: python + + from langchain_community.chat_models import ChatYuan2 + + chat = ChatYuan2() + """ + + client: Any #: :meta private: + async_client: Any = Field(default=None, exclude=True) #: :meta private: + + model_name: str = Field(default="yuan2", alias="model") + """Model name to use.""" + + model_kwargs: Dict[str, Any] = Field(default_factory=dict) + """Holds any model parameters valid for `create` call not explicitly specified.""" + + yuan2_api_key: Optional[str] = Field(default="EMPTY", alias="api_key") + """Automatically inferred from env var `YUAN2_API_KEY` if not provided.""" + + yuan2_api_base: Optional[str] = Field( + default="http://127.0.0.1:8000", alias="base_url" + ) + """Base URL path for API requests, an OpenAI compatible API server.""" + + request_timeout: Optional[Union[float, Tuple[float, float]]] = None + """Timeout for requests to yuan2 completion API. Default is 600 seconds.""" + + max_retries: int = 6 + """Maximum number of retries to make when generating.""" + + streaming: bool = False + """Whether to stream the results or not.""" + + max_tokens: Optional[int] = None + """Maximum number of tokens to generate.""" + + temperature: float = 1.0 + """What sampling temperature to use.""" + + top_p: Optional[float] = 0.9 + """The top-p value to use for sampling.""" + + stop: Optional[List[str]] = [""] + """A list of strings to stop generation when encountered.""" + + repeat_last_n: Optional[int] = 64 + "Last n tokens to penalize" + + repeat_penalty: Optional[float] = 1.18 + """The penalty to apply to repeated tokens.""" + + class Config: + """Configuration for this pydantic object.""" + + allow_population_by_field_name = True + + @property + def lc_secrets(self) -> Dict[str, str]: + return {"yuan2_api_key": "YUAN2_API_KEY"} + + @property + def lc_attributes(self) -> Dict[str, Any]: + attributes: Dict[str, Any] = {} + + if self.yuan2_api_base: + attributes["yuan2_api_base"] = self.yuan2_api_base + + if self.yuan2_api_key: + attributes["yuan2_api_key"] = self.yuan2_api_key + + return attributes + + @root_validator(pre=True) + def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Build extra kwargs from additional params that were passed in.""" + all_required_field_names = get_pydantic_field_names(cls) + extra = values.get("model_kwargs", {}) + for field_name in list(values): + if field_name in extra: + raise ValueError(f"Found {field_name} supplied twice.") + if field_name not in all_required_field_names: + logger.warning( + f"""WARNING! {field_name} is not default parameter. + {field_name} was transferred to model_kwargs. + Please confirm that {field_name} is what you intended.""" + ) + extra[field_name] = values.pop(field_name) + + invalid_model_kwargs = all_required_field_names.intersection(extra.keys()) + if invalid_model_kwargs: + raise ValueError( + f"Parameters {invalid_model_kwargs} should be specified explicitly. " + f"Instead they were passed in as part of `model_kwargs` parameter." + ) + + values["model_kwargs"] = extra + return values + + @root_validator() + def validate_environment(cls, values: Dict) -> Dict: + """Validate that api key and python package exists in environment.""" + values["yuan2_api_key"] = get_from_dict_or_env( + values, "yuan2_api_key", "YUAN2_API_KEY" + ) + + try: + import openai + + except ImportError: + raise ValueError( + "Could not import openai python package. " + "Please install it with `pip install openai`." 
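A minimal invocation sketch for this model wrapper; the base URL below is the class default and should point at your own OpenAI-compatible Yuan2.0 server, and the openai package must be installed as noted above.

from langchain_community.chat_models import ChatYuan2
from langchain_core.messages import HumanMessage

chat = ChatYuan2(
    model="yuan2",
    base_url="http://127.0.0.1:8000",  # an OpenAI-compatible Yuan2.0 endpoint
    temperature=1.0,
    streaming=False,
)
print(chat.invoke([HumanMessage(content="Hello")]))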
+ ) + client_params = { + "api_key": values["yuan2_api_key"], + "base_url": values["yuan2_api_base"], + "timeout": values["request_timeout"], + "max_retries": values["max_retries"], + } + + # generate client and async_client + if not values.get("client"): + values["client"] = openai.OpenAI(**client_params).chat.completions + if not values.get("async_client"): + values["async_client"] = openai.AsyncOpenAI( + **client_params + ).chat.completions + + return values + + @property + def _default_params(self) -> Dict[str, Any]: + """Get the default parameters for calling yuan2 API.""" + params = { + "model": self.model_name, + "stream": self.streaming, + "temperature": self.temperature, + "top_p": self.top_p, + **self.model_kwargs, + } + if self.max_tokens is not None: + params["max_tokens"] = self.max_tokens + if self.request_timeout is not None: + params["request_timeout"] = self.request_timeout + return params + + def completion_with_retry(self, **kwargs: Any) -> Any: + """Use tenacity to retry the completion call.""" + retry_decorator = _create_retry_decorator(self) + + @retry_decorator + def _completion_with_retry(**kwargs: Any) -> Any: + return self.client.create(**kwargs) + + return _completion_with_retry(**kwargs) + + def _combine_llm_outputs(self, llm_outputs: List[Optional[dict]]) -> dict: + overall_token_usage: dict = {} + logger.debug( + f"type(llm_outputs): {type(llm_outputs)}; llm_outputs: {llm_outputs}" + ) + for output in llm_outputs: + if output is None: + # Happens in streaming + continue + token_usage = output["token_usage"] + for k, v in token_usage.__dict__.items(): + if k in overall_token_usage: + overall_token_usage[k] += v + else: + overall_token_usage[k] = v + return {"token_usage": overall_token_usage, "model_name": self.model_name} + + def _stream( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> Iterator[ChatGenerationChunk]: + message_dicts, params = self._create_message_dicts(messages, stop) + params = {**params, **kwargs, "stream": True} + + default_chunk_class = AIMessageChunk + for chunk in self.completion_with_retry(messages=message_dicts, **params): + if not isinstance(chunk, dict): + chunk = chunk.model_dump() + if len(chunk["choices"]) == 0: + continue + choice = chunk["choices"][0] + chunk = _convert_delta_to_message_chunk( + choice["delta"], default_chunk_class + ) + finish_reason = choice.get("finish_reason") + generation_info = ( + dict(finish_reason=finish_reason) if finish_reason is not None else None + ) + default_chunk_class = chunk.__class__ + yield ChatGenerationChunk( + message=chunk, + generation_info=generation_info, + ) + if run_manager: + run_manager.on_llm_new_token(chunk.content) + + def _generate( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> ChatResult: + if self.streaming: + stream_iter = self._stream( + messages=messages, stop=stop, run_manager=run_manager, **kwargs + ) + return generate_from_stream(stream_iter) + + message_dicts, params = self._create_message_dicts(messages, stop) + params = {**params, **kwargs} + response = self.completion_with_retry(messages=message_dicts, **params) + return self._create_chat_result(response) + + def _create_message_dicts( + self, messages: List[BaseMessage], stop: Optional[List[str]] + ) -> Tuple[List[Dict[str, Any]], Dict[str, Any]]: + params = dict(self._invocation_params) + if stop is 
not None: + if "stop" in params: + raise ValueError("`stop` found in both the input and default params.") + params["stop"] = stop + message_dicts = [_convert_message_to_dict(m) for m in messages] + return message_dicts, params + + def _create_chat_result(self, response: ChatCompletion) -> ChatResult: + generations = [] + logger.debug(f"type(response): {type(response)}; response: {response}") + for res in response.choices: + message = _convert_dict_to_message(res.message) + generation_info = dict(finish_reason=res.finish_reason) + if "logprobs" in res: + generation_info["logprobs"] = res.logprobs + gen = ChatGeneration( + message=message, + generation_info=generation_info, + ) + generations.append(gen) + llm_output = { + "token_usage": response.usage, + "model_name": self.model_name, + } + return ChatResult(generations=generations, llm_output=llm_output) + + async def _astream( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[AsyncCallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> AsyncIterator[ChatGenerationChunk]: + message_dicts, params = self._create_message_dicts(messages, stop) + params = {**params, **kwargs, "stream": True} + + default_chunk_class = AIMessageChunk + async for chunk in await acompletion_with_retry( + self, messages=message_dicts, **params + ): + if not isinstance(chunk, dict): + chunk = chunk.model_dump() + if len(chunk["choices"]) == 0: + continue + choice = chunk["choices"][0] + chunk = _convert_delta_to_message_chunk( + choice["delta"], default_chunk_class + ) + finish_reason = choice.get("finish_reason") + generation_info = ( + dict(finish_reason=finish_reason) if finish_reason is not None else None + ) + default_chunk_class = chunk.__class__ + yield ChatGenerationChunk( + message=chunk, + generation_info=generation_info, + ) + if run_manager: + await run_manager.on_llm_new_token(chunk.content) + + async def _agenerate( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[AsyncCallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> ChatResult: + if self.streaming: + stream_iter = self._astream( + messages=messages, stop=stop, run_manager=run_manager, **kwargs + ) + return await agenerate_from_stream(stream_iter) + + message_dicts, params = self._create_message_dicts(messages, stop) + params = {**params, **kwargs} + response = await acompletion_with_retry(self, messages=message_dicts, **params) + return self._create_chat_result(response) + + @property + def _invocation_params(self) -> Mapping[str, Any]: + """Get the parameters used to invoke the model.""" + yuan2_creds: Dict[str, Any] = { + "model": self.model_name, + } + return {**yuan2_creds, **self._default_params} + + @property + def _llm_type(self) -> str: + """Return type of chat model.""" + return "chat-yuan2" + + +def _create_retry_decorator(llm: ChatYuan2) -> Callable[[Any], Any]: + import openai + + min_seconds = 1 + max_seconds = 60 + # Wait 2^x * 1 second between each retry starting with + # 4 seconds, then up to 10 seconds, then 10 seconds afterwards + return retry( + reraise=True, + stop=stop_after_attempt(llm.max_retries), + wait=wait_exponential(multiplier=1, min=min_seconds, max=max_seconds), + retry=( + retry_if_exception_type(openai.APITimeoutError) + | retry_if_exception_type(openai.APIError) + | retry_if_exception_type(openai.APIConnectionError) + | retry_if_exception_type(openai.RateLimitError) + | retry_if_exception_type(openai.InternalServerError) + ), + 
before_sleep=before_sleep_log(logger, logging.WARNING), + ) + + +async def acompletion_with_retry(llm: ChatYuan2, **kwargs: Any) -> Any: + """Use tenacity to retry the async completion call.""" + retry_decorator = _create_retry_decorator(llm) + + @retry_decorator + async def _completion_with_retry(**kwargs: Any) -> Any: + # Use OpenAI's async api https://github.com/openai/openai-python#async-api + return await llm.async_client.create(**kwargs) + + return await _completion_with_retry(**kwargs) + + +def _convert_delta_to_message_chunk( + _dict: ChatCompletionMessage, default_class: Type[BaseMessageChunk] +) -> BaseMessageChunk: + role = _dict.get("role") + content = _dict.get("content") or "" + + if role == "user" or default_class == HumanMessageChunk: + return HumanMessageChunk(content=content) + elif role == "assistant" or default_class == AIMessageChunk: + return AIMessageChunk(content=content) + elif role == "system" or default_class == SystemMessageChunk: + return SystemMessageChunk(content=content) + elif role or default_class == ChatMessageChunk: + return ChatMessageChunk(content=content, role=role) + else: + return default_class(content=content) + + +def _convert_dict_to_message(_dict: ChatCompletionMessage) -> BaseMessage: + role = _dict.get("role") + if role == "user": + return HumanMessage(content=_dict.get("content")) + elif role == "assistant": + content = _dict.get("content") or "" + return AIMessage(content=content) + elif role == "system": + return SystemMessage(content=_dict.get("content")) + else: + return ChatMessage(content=_dict.get("content"), role=role) + + +def _convert_message_to_dict(message: BaseMessage) -> dict: + """Convert a LangChain message to a dictionary. + + Args: + message: The LangChain message. + + Returns: + The dictionary. 
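As a quick illustration of the role mapping implemented by the two converters defined here (a sketch; both helpers are module-private, so importing them directly is for demonstration only):

from langchain_community.chat_models.yuan2 import (
    _convert_dict_to_message,
    _convert_message_to_dict,
)
from langchain_core.messages import HumanMessage

d = _convert_message_to_dict(HumanMessage(content="hi"))
print(d)                            # {'role': 'user', 'content': 'hi'}
print(_convert_dict_to_message(d))  # HumanMessage(content='hi')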
+ """ + message_dict: Dict[str, Any] + if isinstance(message, ChatMessage): + message_dict = {"role": message.role, "content": message.content} + elif isinstance(message, HumanMessage): + message_dict = {"role": "user", "content": message.content} + elif isinstance(message, AIMessage): + message_dict = {"role": "assistant", "content": message.content} + elif isinstance(message, SystemMessage): + message_dict = {"role": "system", "content": message.content} + elif isinstance(message, FunctionMessage): + message_dict = { + "role": "function", + "name": message.name, + "content": message.content, + } + else: + raise ValueError(f"Got unknown type {message}") + if "name" in message.additional_kwargs: + message_dict["name"] = message.additional_kwargs["name"] + return message_dict diff --git a/libs/community/langchain_community/document_loaders/__init__.py b/libs/community/langchain_community/document_loaders/__init__.py index 869b0dca0329b..8f2d6baf9b535 100644 --- a/libs/community/langchain_community/document_loaders/__init__.py +++ b/libs/community/langchain_community/document_loaders/__init__.py @@ -36,6 +36,7 @@ ) from langchain_community.document_loaders.astradb import AstraDBLoader from langchain_community.document_loaders.async_html import AsyncHtmlLoader +from langchain_community.document_loaders.athena import AthenaLoader from langchain_community.document_loaders.azlyrics import AZLyricsLoader from langchain_community.document_loaders.azure_ai_data import ( AzureAIDataLoader, @@ -159,6 +160,7 @@ PyPDFLoader, UnstructuredPDFLoader, ) +from langchain_community.document_loaders.pebblo import PebbloSafeLoader from langchain_community.document_loaders.polars_dataframe import PolarsDataFrameLoader from langchain_community.document_loaders.powerpoint import UnstructuredPowerPointLoader from langchain_community.document_loaders.psychic import PsychicLoader @@ -257,6 +259,7 @@ "AssemblyAIAudioTranscriptLoader", "AstraDBLoader", "AsyncHtmlLoader", + "AthenaLoader", "AzureAIDataLoader", "AzureAIDocumentIntelligenceLoader", "AzureBlobStorageContainerLoader", @@ -282,6 +285,7 @@ "CubeSemanticLoader", "DataFrameLoader", "DatadogLogsLoader", + "PebbloSafeLoader", "DiffbotLoader", "DirectoryLoader", "DiscordChatLoader", diff --git a/libs/community/langchain_community/document_loaders/arcgis_loader.py b/libs/community/langchain_community/document_loaders/arcgis_loader.py index 4958ef2ce6a00..24264b0add7c3 100644 --- a/libs/community/langchain_community/document_loaders/arcgis_loader.py +++ b/libs/community/langchain_community/document_loaders/arcgis_loader.py @@ -41,7 +41,7 @@ def __init__( ) from e try: - from bs4 import BeautifulSoup # type: ignore + from bs4 import BeautifulSoup self.BEAUTIFULSOUP = BeautifulSoup except ImportError: diff --git a/libs/community/langchain_community/document_loaders/assemblyai.py b/libs/community/langchain_community/document_loaders/assemblyai.py index ee040ab513dba..3b1b4060b9864 100644 --- a/libs/community/langchain_community/document_loaders/assemblyai.py +++ b/libs/community/langchain_community/document_loaders/assemblyai.py @@ -123,7 +123,9 @@ class AssemblyAIAudioLoaderById(BaseLoader): """ - def __init__(self, transcript_id, api_key, transcript_format): # type: ignore[no-untyped-def] + def __init__( + self, transcript_id: str, api_key: str, transcript_format: TranscriptFormat + ): """ Initializes the AssemblyAI AssemblyAIAudioLoaderById. 
diff --git a/libs/community/langchain_community/document_loaders/astradb.py b/libs/community/langchain_community/document_loaders/astradb.py index 4cc1621a39d64..8dae5c4528a32 100644 --- a/libs/community/langchain_community/document_loaders/astradb.py +++ b/libs/community/langchain_community/document_loaders/astradb.py @@ -2,8 +2,6 @@ import json import logging -import threading -from queue import Queue from typing import ( TYPE_CHECKING, Any, @@ -16,10 +14,9 @@ ) from langchain_core.documents import Document -from langchain_core.runnables import run_in_executor from langchain_community.document_loaders.base import BaseLoader -from langchain_community.utilities.astradb import AstraDBEnvironment +from langchain_community.utilities.astradb import _AstraDBEnvironment if TYPE_CHECKING: from astrapy.db import AstraDB, AsyncAstraDB @@ -33,6 +30,7 @@ class AstraDBLoader(BaseLoader): def __init__( self, collection_name: str, + *, token: Optional[str] = None, api_endpoint: Optional[str] = None, astra_db_client: Optional[AstraDB] = None, @@ -44,7 +42,7 @@ def __init__( nb_prefetched: int = 1000, extraction_function: Callable[[Dict], str] = json.dumps, ) -> None: - astra_env = AstraDBEnvironment( + astra_env = _AstraDBEnvironment( token=token, api_endpoint=api_endpoint, astra_db_client=astra_db_client, @@ -65,38 +63,27 @@ def load(self) -> List[Document]: return list(self.lazy_load()) def lazy_load(self) -> Iterator[Document]: - queue = Queue(self.nb_prefetched) # type: ignore[var-annotated] - t = threading.Thread(target=self.fetch_results, args=(queue,)) - t.start() - while True: - doc = queue.get() - if doc is None: - break - yield doc - t.join() + for doc in self.collection.paginated_find( + filter=self.filter, + options=self.find_options, + projection=self.projection, + sort=None, + prefetched=self.nb_prefetched, + ): + yield Document( + page_content=self.extraction_function(doc), + metadata={ + "namespace": self.collection.astra_db.namespace, + "api_endpoint": self.collection.astra_db.base_url, + "collection": self.collection_name, + }, + ) async def aload(self) -> List[Document]: """Load data into Document objects.""" return [doc async for doc in self.alazy_load()] async def alazy_load(self) -> AsyncIterator[Document]: - if not self.astra_env.async_astra_db: - iterator = run_in_executor( - None, - self.collection.paginated_find, - filter=self.filter, - options=self.find_options, - projection=self.projection, - sort=None, - prefetched=True, - ) - done = object() - while True: - item = await run_in_executor(None, lambda it: next(it, done), iterator) - if item is done: - break - yield item # type: ignore[misc] - return async_collection = await self.astra_env.async_astra_db.collection( self.collection_name ) @@ -105,7 +92,7 @@ async def alazy_load(self) -> AsyncIterator[Document]: options=self.find_options, projection=self.projection, sort=None, - prefetched=True, + prefetched=self.nb_prefetched, ): yield Document( page_content=self.extraction_function(doc), @@ -115,29 +102,3 @@ async def alazy_load(self) -> AsyncIterator[Document]: "collection": self.collection_name, }, ) - - def fetch_results(self, queue: Queue): # type: ignore[no-untyped-def] - self.fetch_page_result(queue) - while self.find_options.get("pageState"): - self.fetch_page_result(queue) - queue.put(None) - - def fetch_page_result(self, queue: Queue): # type: ignore[no-untyped-def] - res = self.collection.find( - filter=self.filter, - options=self.find_options, - projection=self.projection, - sort=None, - ) - 
self.find_options["pageState"] = res["data"].get("nextPageState") - for doc in res["data"]["documents"]: - queue.put( - Document( - page_content=self.extraction_function(doc), - metadata={ - "namespace": self.collection.astra_db.namespace, - "api_endpoint": self.collection.astra_db.base_url, - "collection": self.collection.collection_name, - }, - ) - ) diff --git a/libs/community/langchain_community/document_loaders/athena.py b/libs/community/langchain_community/document_loaders/athena.py new file mode 100644 index 0000000000000..e3ed42e44dbaa --- /dev/null +++ b/libs/community/langchain_community/document_loaders/athena.py @@ -0,0 +1,163 @@ +from __future__ import annotations + +import io +import json +import time +from typing import Any, Dict, Iterator, List, Optional, Tuple + +from langchain_core.documents import Document + +from langchain_community.document_loaders.base import BaseLoader + + +class AthenaLoader(BaseLoader): + """Load documents from `AWS Athena`. + + Each document represents one row of the result. + - By default, all columns are written into the `page_content` of the document + and none into the `metadata` of the document. + - If `metadata_columns` are provided then these columns are written + into the `metadata` of the document while the rest of the columns + are written into the `page_content` of the document. + + To authenticate, the AWS client uses this method to automatically load credentials: + https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html + + If a specific credential profile should be used, you must pass + the name of the profile from the ~/.aws/credentials file that is to be used. + + Make sure the credentials / roles used have the required policies to + access the Amazon Textract service. + """ + + def __init__( + self, + query: str, + database: str, + s3_output_uri: str, + profile_name: str, + metadata_columns: Optional[List[str]] = None, + ): + """Initialize Athena document loader. + + Args: + query: The query to run in Athena. + database: Athena database + s3_output_uri: Athena output path + metadata_columns: Optional. Columns written to Document `metadata`. + """ + self.query = query + self.database = database + self.s3_output_uri = s3_output_uri + self.metadata_columns = metadata_columns if metadata_columns is not None else [] + + try: + import boto3 + except ImportError: + raise ModuleNotFoundError( + "Could not import boto3 python package. " + "Please install it with `pip install boto3`." + ) + + try: + session = ( + boto3.Session(profile_name=profile_name) + if profile_name is not None + else boto3.Session() + ) + except Exception as e: + raise ValueError( + "Could not load credentials to authenticate with AWS client. " + "Please check that credentials in the specified " + "profile name are valid." 
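A usage sketch for the new loader; the query, database, S3 output location and profile name are placeholders for your own Athena setup.

from langchain_community.document_loaders import AthenaLoader

loader = AthenaLoader(
    query="SELECT id, title, body FROM articles LIMIT 10",
    database="my_database",
    s3_output_uri="s3://my-athena-results-bucket/output/",
    profile_name="default",
    metadata_columns=["id"],  # written to Document.metadata; other columns go to page_content
)
docs = loader.load()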
+ ) from e + + self.athena_client = session.client("athena") + self.s3_client = session.client("s3") + + def _execute_query(self) -> List[Dict[str, Any]]: + response = self.athena_client.start_query_execution( + QueryString=self.query, + QueryExecutionContext={"Database": self.database}, + ResultConfiguration={"OutputLocation": self.s3_output_uri}, + ) + query_execution_id = response["QueryExecutionId"] + while True: + response = self.athena_client.get_query_execution( + QueryExecutionId=query_execution_id + ) + state = response["QueryExecution"]["Status"]["State"] + if state == "SUCCEEDED": + break + elif state == "FAILED": + resp_status = response["QueryExecution"]["Status"] + state_change_reason = resp_status["StateChangeReason"] + err = f"Query Failed: {state_change_reason}" + raise Exception(err) + elif state == "CANCELLED": + raise Exception("Query was cancelled by the user.") + time.sleep(1) + + result_set = self._get_result_set(query_execution_id) + return json.loads(result_set.to_json(orient="records")) + + def _remove_suffix(self, input_string: str, suffix: str) -> str: + if suffix and input_string.endswith(suffix): + return input_string[: -len(suffix)] + return input_string + + def _remove_prefix(self, input_string: str, suffix: str) -> str: + if suffix and input_string.startswith(suffix): + return input_string[len(suffix) :] + return input_string + + def _get_result_set(self, query_execution_id: str) -> Any: + try: + import pandas as pd + except ImportError: + raise ModuleNotFoundError( + "Could not import pandas python package. " + "Please install it with `pip install pandas`." + ) + + output_uri = self.s3_output_uri + tokens = self._remove_prefix( + self._remove_suffix(output_uri, "/"), "s3://" + ).split("/") + bucket = tokens[0] + key = "/".join(tokens[1:] + [query_execution_id]) + ".csv" + + obj = self.s3_client.get_object(Bucket=bucket, Key=key) + df = pd.read_csv(io.BytesIO(obj["Body"].read()), encoding="utf8") + return df + + def _get_columns( + self, query_result: List[Dict[str, Any]] + ) -> Tuple[List[str], List[str]]: + content_columns = [] + metadata_columns = [] + all_columns = list(query_result[0].keys()) + for key in all_columns: + if key in self.metadata_columns: + metadata_columns.append(key) + else: + content_columns.append(key) + + return content_columns, metadata_columns + + def lazy_load(self) -> Iterator[Document]: + query_result = self._execute_query() + content_columns, metadata_columns = self._get_columns(query_result) + for row in query_result: + page_content = "\n".join( + f"{k}: {v}" for k, v in row.items() if k in content_columns + ) + metadata = { + k: v for k, v in row.items() if k in metadata_columns and v is not None + } + doc = Document(page_content=page_content, metadata=metadata) + yield doc + + def load(self) -> List[Document]: + """Load data into document objects.""" + return list(self.lazy_load()) diff --git a/libs/community/langchain_community/document_loaders/blob_loaders/file_system.py b/libs/community/langchain_community/document_loaders/blob_loaders/file_system.py index d9c8ebf883347..ee756f32ed73e 100644 --- a/libs/community/langchain_community/document_loaders/blob_loaders/file_system.py +++ b/libs/community/langchain_community/document_loaders/blob_loaders/file_system.py @@ -12,6 +12,7 @@ def _make_iterator( length_func: Callable[[], int], show_progress: bool = False ) -> Callable[[Iterable[T]], Iterator[T]]: """Create a function that optionally wraps an iterable in tqdm.""" + iterator: Callable[[Iterable[T]], Iterator[T]] if 
show_progress: try: from tqdm.auto import tqdm @@ -29,7 +30,7 @@ def _with_tqdm(iterable: Iterable[T]) -> Iterator[T]: iterator = _with_tqdm else: - iterator = iter # type: ignore + iterator = iter return iterator diff --git a/libs/community/langchain_community/document_loaders/cassandra.py b/libs/community/langchain_community/document_loaders/cassandra.py index a3b7732c131cb..3cef56a1cbcc8 100644 --- a/libs/community/langchain_community/document_loaders/cassandra.py +++ b/libs/community/langchain_community/document_loaders/cassandra.py @@ -29,18 +29,18 @@ def __init__( table: Optional[str] = None, session: Optional[Session] = None, keyspace: Optional[str] = None, - query: Optional[Union[str, Statement]] = None, + query: Union[str, Statement, None] = None, page_content_mapper: Callable[[Any], str] = str, metadata_mapper: Callable[[Any], dict] = lambda _: {}, *, - query_parameters: Union[dict, Sequence] = None, # type: ignore[assignment] + query_parameters: Union[dict, Sequence, None] = None, query_timeout: Optional[float] = _NOT_SET, # type: ignore[assignment] query_trace: bool = False, - query_custom_payload: dict = None, # type: ignore[assignment] + query_custom_payload: Optional[dict] = None, query_execution_profile: Any = _NOT_SET, query_paging_state: Any = None, - query_host: Host = None, - query_execute_as: str = None, # type: ignore[assignment] + query_host: Optional[Host] = None, + query_execute_as: Optional[str] = None, ) -> None: """ Document Loader for Apache Cassandra. diff --git a/libs/community/langchain_community/document_loaders/csv_loader.py b/libs/community/langchain_community/document_loaders/csv_loader.py index 92198ac5bac7a..28d340e5fbd6d 100644 --- a/libs/community/langchain_community/document_loaders/csv_loader.py +++ b/libs/community/langchain_community/document_loaders/csv_loader.py @@ -90,7 +90,7 @@ def load(self) -> List[Document]: def __read_file(self, csvfile: TextIOWrapper) -> List[Document]: docs = [] - csv_reader = csv.DictReader(csvfile, **self.csv_args) # type: ignore + csv_reader = csv.DictReader(csvfile, **self.csv_args) for i, row in enumerate(csv_reader): try: source = ( diff --git a/libs/community/langchain_community/document_loaders/directory.py b/libs/community/langchain_community/document_loaders/directory.py index 3837133a69332..3cb2ad1309a0a 100644 --- a/libs/community/langchain_community/document_loaders/directory.py +++ b/libs/community/langchain_community/document_loaders/directory.py @@ -2,7 +2,7 @@ import logging import random from pathlib import Path -from typing import Any, List, Optional, Type, Union +from typing import Any, List, Optional, Sequence, Type, Union from langchain_core.documents import Document @@ -41,6 +41,7 @@ def __init__( use_multithreading: bool = False, max_concurrency: int = 4, *, + exclude: Union[Sequence[str], str] = (), sample_size: int = 0, randomize_sample: bool = False, sample_seed: Union[int, None] = None, @@ -51,6 +52,8 @@ def __init__( path: Path to directory. glob: Glob pattern to use to find files. Defaults to "**/[!.]*" (all files except hidden). + exclude: A pattern or list of patterns to exclude from results. + Use glob syntax. silent_errors: Whether to silently ignore errors. Defaults to False. load_hidden: Whether to load hidden files. Defaults to False. loader_cls: Loader class to use for loading files. @@ -64,11 +67,38 @@ def __init__( directory. randomize_sample: Shuffle the files to get a random sample. sample_seed: set the seed of the random shuffle for reproducibility. + + Examples: + + .. 
code-block:: python + from langchain_community.document_loaders import DirectoryLoader + + # Load all non-hidden files in a directory. + loader = DirectoryLoader("/path/to/directory") + + # Load all text files in a directory without recursion. + loader = DirectoryLoader("/path/to/directory", glob="*.txt") + + # Recursively load all text files in a directory. + loader = DirectoryLoader( + "/path/to/directory", glob="*.txt", recursive=True + ) + + # Load all files in a directory, except for py files. + loader = DirectoryLoader("/path/to/directory", exclude="*.py") + + # Load all files in a directory, except for py or pyc files. + loader = DirectoryLoader( + "/path/to/directory", exclude=["*.py", "*.pyc"] + ) """ if loader_kwargs is None: loader_kwargs = {} + if isinstance(exclude, str): + exclude = (exclude,) self.path = path self.glob = glob + self.exclude = exclude self.load_hidden = load_hidden self.loader_cls = loader_cls self.loader_kwargs = loader_kwargs @@ -118,14 +148,20 @@ def load(self) -> List[Document]: raise ValueError(f"Expected directory, got file: '{self.path}'") docs: List[Document] = [] - items = list(p.rglob(self.glob) if self.recursive else p.glob(self.glob)) + + paths = p.rglob(self.glob) if self.recursive else p.glob(self.glob) + items = [ + path + for path in paths + if not (self.exclude and any(path.match(glob) for glob in self.exclude)) + ] if self.sample_size > 0: if self.randomize_sample: - randomizer = ( - random.Random(self.sample_seed) if self.sample_seed else random + randomizer = random.Random( + self.sample_seed if self.sample_seed else None ) - randomizer.shuffle(items) # type: ignore + randomizer.shuffle(items) items = items[: min(len(items), self.sample_size)] pbar = None diff --git a/libs/community/langchain_community/document_loaders/git.py b/libs/community/langchain_community/document_loaders/git.py index 97c02b2111b00..8d97ecd3df419 100644 --- a/libs/community/langchain_community/document_loaders/git.py +++ b/libs/community/langchain_community/document_loaders/git.py @@ -41,7 +41,7 @@ def __init__( def load(self) -> List[Document]: try: - from git import Blob, Repo # type: ignore + from git import Blob, Repo except ImportError as ex: raise ImportError( "Could not import git python package. " @@ -76,7 +76,7 @@ def load(self) -> List[Document]: file_path = os.path.join(self.repo_path, item.path) - ignored_files = repo.ignored([file_path]) # type: ignore + ignored_files = repo.ignored([file_path]) if len(ignored_files): continue diff --git a/libs/community/langchain_community/document_loaders/github.py b/libs/community/langchain_community/document_loaders/github.py index 8a361a46e2b13..65f327d948f19 100644 --- a/libs/community/langchain_community/document_loaders/github.py +++ b/libs/community/langchain_community/document_loaders/github.py @@ -65,6 +65,12 @@ class GitHubIssuesLoader(BaseGitHubLoader): since: Optional[str] = None """Only show notifications updated after the given time. This is a timestamp in ISO 8601 format: YYYY-MM-DDTHH:MM:SSZ.""" + page: Optional[int] = None + """The page number for paginated results. + Defaults to 1 in the GitHub API.""" + per_page: Optional[int] = None + """Number of items per page. 
+ Defaults to 30 in the GitHub API.""" @validator("since", allow_reuse=True) def validate_since(cls, v: Optional[str]) -> Optional[str]: @@ -112,7 +118,11 @@ def lazy_load(self) -> Iterator[Document]: if not self.include_prs and doc.metadata["is_pull_request"]: continue yield doc - if response.links and response.links.get("next"): + if ( + response.links + and response.links.get("next") + and (not self.page and not self.per_page) + ): url = response.links["next"]["url"] else: url = None @@ -176,6 +186,8 @@ def query_params(self) -> str: "sort": self.sort, "direction": self.direction, "since": self.since, + "page": self.page, + "per_page": self.per_page, } query_params_list = [ f"{k}={v}" for k, v in query_params_dict.items() if v is not None diff --git a/libs/community/langchain_community/document_loaders/notiondb.py b/libs/community/langchain_community/document_loaders/notiondb.py index 0987d532c9723..bcc9c38c14ad1 100644 --- a/libs/community/langchain_community/document_loaders/notiondb.py +++ b/libs/community/langchain_community/document_loaders/notiondb.py @@ -20,6 +20,17 @@ class NotionDBLoader(BaseLoader): database_id (str): Notion database id. request_timeout_sec (int): Timeout for Notion requests in seconds. Defaults to 10. + filter_object (Dict[str, Any]): Filter object used to limit returned + entries based on specified criteria. + E.g.: { + "timestamp": "last_edited_time", + "last_edited_time": { + "on_or_after": "2024-02-07" + } + } -> will only return entries that were last edited + on or after 2024-02-07 + Notion docs: https://developers.notion.com/reference/post-database-query-filter + Defaults to None, which will return ALL entries. """ def __init__( @@ -27,6 +38,8 @@ def __init__( integration_token: str, database_id: str, request_timeout_sec: Optional[int] = 10, + *, + filter_object: Optional[Dict[str, Any]] = None, ) -> None: """Initialize with parameters.""" if not integration_token: @@ -42,6 +55,7 @@ def __init__( "Notion-Version": "2022-06-28", } self.request_timeout_sec = request_timeout_sec + self.filter_object = filter_object or {} def load(self) -> List[Document]: """Load documents from the Notion database. @@ -55,7 +69,10 @@ def load(self) -> List[Document]: def _retrieve_page_summaries( self, query_dict: Dict[str, Any] = {"page_size": 100} ) -> List[Dict[str, Any]]: - """Get all the pages from a Notion database.""" + """ + Get all the pages from a Notion database + OR filter based on specified criteria. 
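        Example (illustrative sketch; the integration token and database id below
        are placeholders) of configuring the loader so that this call only
        retrieves recently edited pages:

        .. code-block:: python

            from langchain_community.document_loaders import NotionDBLoader

            loader = NotionDBLoader(
                integration_token="secret-placeholder-token",
                database_id="00000000-0000-0000-0000-000000000000",
                filter_object={
                    "timestamp": "last_edited_time",
                    "last_edited_time": {"on_or_after": "2024-02-07"},
                },
            )
            docs = loader.load()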
+ """ pages: List[Dict[str, Any]] = [] while True: @@ -63,6 +80,7 @@ def _retrieve_page_summaries( DATABASE_URL.format(database_id=self.database_id), method="POST", query_dict=query_dict, + filter_object=self.filter_object, ) pages.extend(data.get("results")) @@ -182,13 +200,18 @@ def _load_blocks(self, block_id: str, num_tabs: int = 0) -> str: return "\n".join(result_lines_arr) def _request( - self, url: str, method: str = "GET", query_dict: Dict[str, Any] = {} + self, + url: str, + method: str = "GET", + query_dict: Dict[str, Any] = {}, + *, + filter_object: Optional[Dict[str, Any]] = None, ) -> Any: res = requests.request( method, url, headers=self.headers, - json=query_dict, + json={**query_dict, "filter": filter_object or {}}, timeout=self.request_timeout_sec, ) res.raise_for_status() diff --git a/libs/community/langchain_community/document_loaders/parsers/language/c.py b/libs/community/langchain_community/document_loaders/parsers/language/c.py new file mode 100644 index 0000000000000..2db1ec99fca4a --- /dev/null +++ b/libs/community/langchain_community/document_loaders/parsers/language/c.py @@ -0,0 +1,36 @@ +from typing import TYPE_CHECKING + +from langchain_community.document_loaders.parsers.language.tree_sitter_segmenter import ( # noqa: E501 + TreeSitterSegmenter, +) + +if TYPE_CHECKING: + from tree_sitter import Language + + +CHUNK_QUERY = """ + [ + (struct_specifier + body: (field_declaration_list)) @struct + (enum_specifier + body: (enumerator_list)) @enum + (union_specifier + body: (field_declaration_list)) @union + (function_definition) @function + ] +""".strip() + + +class CSegmenter(TreeSitterSegmenter): + """Code segmenter for C.""" + + def get_language(self) -> "Language": + from tree_sitter_languages import get_language + + return get_language("c") + + def get_chunk_query(self) -> str: + return CHUNK_QUERY + + def make_line_comment(self, text: str) -> str: + return f"// {text}" diff --git a/libs/community/langchain_community/document_loaders/parsers/language/cpp.py b/libs/community/langchain_community/document_loaders/parsers/language/cpp.py new file mode 100644 index 0000000000000..9d09164a846e7 --- /dev/null +++ b/libs/community/langchain_community/document_loaders/parsers/language/cpp.py @@ -0,0 +1,36 @@ +from typing import TYPE_CHECKING + +from langchain_community.document_loaders.parsers.language.tree_sitter_segmenter import ( # noqa: E501 + TreeSitterSegmenter, +) + +if TYPE_CHECKING: + from tree_sitter import Language + + +CHUNK_QUERY = """ + [ + (class_specifier + body: (field_declaration_list)) @class + (struct_specifier + body: (field_declaration_list)) @struct + (union_specifier + body: (field_declaration_list)) @union + (function_definition) @function + ] +""".strip() + + +class CPPSegmenter(TreeSitterSegmenter): + """Code segmenter for C++.""" + + def get_language(self) -> "Language": + from tree_sitter_languages import get_language + + return get_language("cpp") + + def get_chunk_query(self) -> str: + return CHUNK_QUERY + + def make_line_comment(self, text: str) -> str: + return f"// {text}" diff --git a/libs/community/langchain_community/document_loaders/parsers/language/csharp.py b/libs/community/langchain_community/document_loaders/parsers/language/csharp.py new file mode 100644 index 0000000000000..a9f809fa00a84 --- /dev/null +++ b/libs/community/langchain_community/document_loaders/parsers/language/csharp.py @@ -0,0 +1,36 @@ +from typing import TYPE_CHECKING + +from langchain_community.document_loaders.parsers.language.tree_sitter_segmenter import ( # 
noqa: E501 + TreeSitterSegmenter, +) + +if TYPE_CHECKING: + from tree_sitter import Language + + +CHUNK_QUERY = """ + [ + (namespace_declaration) @namespace + (class_declaration) @class + (method_declaration) @method + (interface_declaration) @interface + (enum_declaration) @enum + (struct_declaration) @struct + (record_declaration) @record + ] +""".strip() + + +class CSharpSegmenter(TreeSitterSegmenter): + """Code segmenter for C#.""" + + def get_language(self) -> "Language": + from tree_sitter_languages import get_language + + return get_language("c_sharp") + + def get_chunk_query(self) -> str: + return CHUNK_QUERY + + def make_line_comment(self, text: str) -> str: + return f"// {text}" diff --git a/libs/community/langchain_community/document_loaders/parsers/language/go.py b/libs/community/langchain_community/document_loaders/parsers/language/go.py new file mode 100644 index 0000000000000..f836ab3ad710c --- /dev/null +++ b/libs/community/langchain_community/document_loaders/parsers/language/go.py @@ -0,0 +1,31 @@ +from typing import TYPE_CHECKING + +from langchain_community.document_loaders.parsers.language.tree_sitter_segmenter import ( # noqa: E501 + TreeSitterSegmenter, +) + +if TYPE_CHECKING: + from tree_sitter import Language + + +CHUNK_QUERY = """ + [ + (function_declaration) @function + (type_declaration) @type + ] +""".strip() + + +class GoSegmenter(TreeSitterSegmenter): + """Code segmenter for Go.""" + + def get_language(self) -> "Language": + from tree_sitter_languages import get_language + + return get_language("go") + + def get_chunk_query(self) -> str: + return CHUNK_QUERY + + def make_line_comment(self, text: str) -> str: + return f"// {text}" diff --git a/libs/community/langchain_community/document_loaders/parsers/language/java.py b/libs/community/langchain_community/document_loaders/parsers/language/java.py new file mode 100644 index 0000000000000..c7293e1ed7f78 --- /dev/null +++ b/libs/community/langchain_community/document_loaders/parsers/language/java.py @@ -0,0 +1,32 @@ +from typing import TYPE_CHECKING + +from langchain_community.document_loaders.parsers.language.tree_sitter_segmenter import ( # noqa: E501 + TreeSitterSegmenter, +) + +if TYPE_CHECKING: + from tree_sitter import Language + + +CHUNK_QUERY = """ + [ + (class_declaration) @class + (interface_declaration) @interface + (enum_declaration) @enum + ] +""".strip() + + +class JavaSegmenter(TreeSitterSegmenter): + """Code segmenter for Java.""" + + def get_language(self) -> "Language": + from tree_sitter_languages import get_language + + return get_language("java") + + def get_chunk_query(self) -> str: + return CHUNK_QUERY + + def make_line_comment(self, text: str) -> str: + return f"// {text}" diff --git a/libs/community/langchain_community/document_loaders/parsers/language/javascript.py b/libs/community/langchain_community/document_loaders/parsers/language/javascript.py index 0f2fea68fa232..27a360a2e6c64 100644 --- a/libs/community/langchain_community/document_loaders/parsers/language/javascript.py +++ b/libs/community/langchain_community/document_loaders/parsers/language/javascript.py @@ -1,4 +1,4 @@ -from typing import Any, List +from typing import Any, List, Tuple from langchain_community.document_loaders.parsers.language.code_segmenter import ( CodeSegmenter, @@ -55,15 +55,18 @@ def simplify_code(self) -> str: tree = esprima.parseScript(self.code, loc=True) simplified_lines = self.source_lines[:] + indices_to_del: List[Tuple[int, int]] = [] for node in tree.body: if isinstance( node, 
(esprima.nodes.FunctionDeclaration, esprima.nodes.ClassDeclaration), ): - start = node.loc.start.line - 1 + start, end = node.loc.start.line - 1, node.loc.end.line simplified_lines[start] = f"// Code for: {simplified_lines[start]}" - for line_num in range(start + 1, node.loc.end.line): - simplified_lines[line_num] = None # type: ignore + indices_to_del.append((start + 1, end)) - return "\n".join(line for line in simplified_lines if line is not None) + for start, end in reversed(indices_to_del): + del simplified_lines[start + 0 : end] + + return "\n".join(line for line in simplified_lines) diff --git a/libs/community/langchain_community/document_loaders/parsers/language/kotlin.py b/libs/community/langchain_community/document_loaders/parsers/language/kotlin.py new file mode 100644 index 0000000000000..6f946f7b4a622 --- /dev/null +++ b/libs/community/langchain_community/document_loaders/parsers/language/kotlin.py @@ -0,0 +1,31 @@ +from typing import TYPE_CHECKING + +from langchain_community.document_loaders.parsers.language.tree_sitter_segmenter import ( # noqa: E501 + TreeSitterSegmenter, +) + +if TYPE_CHECKING: + from tree_sitter import Language + + +CHUNK_QUERY = """ + [ + (function_declaration) @function + (class_declaration) @class + ] +""".strip() + + +class KotlinSegmenter(TreeSitterSegmenter): + """Code segmenter for Kotlin.""" + + def get_language(self) -> "Language": + from tree_sitter_languages import get_language + + return get_language("kotlin") + + def get_chunk_query(self) -> str: + return CHUNK_QUERY + + def make_line_comment(self, text: str) -> str: + return f"// {text}" diff --git a/libs/community/langchain_community/document_loaders/parsers/language/language_parser.py b/libs/community/langchain_community/document_loaders/parsers/language/language_parser.py index b3f6534327f55..8709b58becf42 100644 --- a/libs/community/langchain_community/document_loaders/parsers/language/language_parser.py +++ b/libs/community/langchain_community/document_loaders/parsers/language/language_parser.py @@ -6,11 +6,25 @@ from langchain_community.document_loaders.base import BaseBlobParser from langchain_community.document_loaders.blob_loaders import Blob +from langchain_community.document_loaders.parsers.language.c import CSegmenter from langchain_community.document_loaders.parsers.language.cobol import CobolSegmenter +from langchain_community.document_loaders.parsers.language.cpp import CPPSegmenter +from langchain_community.document_loaders.parsers.language.csharp import CSharpSegmenter +from langchain_community.document_loaders.parsers.language.go import GoSegmenter +from langchain_community.document_loaders.parsers.language.java import JavaSegmenter from langchain_community.document_loaders.parsers.language.javascript import ( JavaScriptSegmenter, ) +from langchain_community.document_loaders.parsers.language.kotlin import KotlinSegmenter +from langchain_community.document_loaders.parsers.language.lua import LuaSegmenter +from langchain_community.document_loaders.parsers.language.perl import PerlSegmenter from langchain_community.document_loaders.parsers.language.python import PythonSegmenter +from langchain_community.document_loaders.parsers.language.ruby import RubySegmenter +from langchain_community.document_loaders.parsers.language.rust import RustSegmenter +from langchain_community.document_loaders.parsers.language.scala import ScalaSegmenter +from langchain_community.document_loaders.parsers.language.typescript import ( + TypeScriptSegmenter, +) if TYPE_CHECKING: from 
langchain.text_splitter import Language @@ -22,12 +36,36 @@ "py": Language.PYTHON, "js": Language.JS, "cobol": Language.COBOL, + "c": Language.C, + "cpp": Language.CPP, + "cs": Language.CSHARP, + "rb": Language.RUBY, + "scala": Language.SCALA, + "rs": Language.RUST, + "go": Language.GO, + "kt": Language.KOTLIN, + "lua": Language.LUA, + "pl": Language.PERL, + "ts": Language.TS, + "java": Language.JAVA, } LANGUAGE_SEGMENTERS: Dict[str, Any] = { Language.PYTHON: PythonSegmenter, Language.JS: JavaScriptSegmenter, Language.COBOL: CobolSegmenter, + Language.C: CSegmenter, + Language.CPP: CPPSegmenter, + Language.CSHARP: CSharpSegmenter, + Language.RUBY: RubySegmenter, + Language.RUST: RustSegmenter, + Language.SCALA: ScalaSegmenter, + Language.GO: GoSegmenter, + Language.KOTLIN: KotlinSegmenter, + Language.LUA: LuaSegmenter, + Language.PERL: PerlSegmenter, + Language.TS: TypeScriptSegmenter, + Language.JAVA: JavaSegmenter, } except ImportError: LANGUAGE_EXTENSIONS = {} @@ -43,11 +81,34 @@ class LanguageParser(BaseBlobParser): This approach can potentially improve the accuracy of QA models over source code. - Currently, the supported languages for code parsing are Python and JavaScript. + The supported languages for code parsing are: + + - C (*) + - C++ (*) + - C# (*) + - COBOL + - Go (*) + - Java (*) + - JavaScript (requires package `esprima`) + - Kotlin (*) + - Lua (*) + - Perl (*) + - Python + - Ruby (*) + - Rust (*) + - Scala (*) + - TypeScript (*) + + Items marked with (*) require the packages `tree_sitter` and + `tree_sitter_languages`. It is straightforward to add support for additional + languages using `tree_sitter`, although this currently requires modifying LangChain. The language used for parsing can be configured, along with the minimum number of lines required to activate the splitting based on syntax. + If a language is not explicitly specified, `LanguageParser` will infer one from + filename extensions, if present. + Examples: .. 
code-block:: python diff --git a/libs/community/langchain_community/document_loaders/parsers/language/lua.py b/libs/community/langchain_community/document_loaders/parsers/language/lua.py new file mode 100644 index 0000000000000..3e0a762ba4b5f --- /dev/null +++ b/libs/community/langchain_community/document_loaders/parsers/language/lua.py @@ -0,0 +1,33 @@ +from typing import TYPE_CHECKING + +from langchain_community.document_loaders.parsers.language.tree_sitter_segmenter import ( # noqa: E501 + TreeSitterSegmenter, +) + +if TYPE_CHECKING: + from tree_sitter import Language + + +CHUNK_QUERY = """ + [ + (function_definition_statement + name: (identifier)) @function + (local_function_definition_statement + name: (identifier)) @function + ] +""".strip() + + +class LuaSegmenter(TreeSitterSegmenter): + """Code segmenter for Lua.""" + + def get_language(self) -> "Language": + from tree_sitter_languages import get_language + + return get_language("lua") + + def get_chunk_query(self) -> str: + return CHUNK_QUERY + + def make_line_comment(self, text: str) -> str: + return f"-- {text}" diff --git a/libs/community/langchain_community/document_loaders/parsers/language/perl.py b/libs/community/langchain_community/document_loaders/parsers/language/perl.py new file mode 100644 index 0000000000000..b68d52cef2b04 --- /dev/null +++ b/libs/community/langchain_community/document_loaders/parsers/language/perl.py @@ -0,0 +1,30 @@ +from typing import TYPE_CHECKING + +from langchain_community.document_loaders.parsers.language.tree_sitter_segmenter import ( # noqa: E501 + TreeSitterSegmenter, +) + +if TYPE_CHECKING: + from tree_sitter import Language + + +CHUNK_QUERY = """ + [ + (function_definition) @subroutine + ] +""".strip() + + +class PerlSegmenter(TreeSitterSegmenter): + """Code segmenter for Perl.""" + + def get_language(self) -> "Language": + from tree_sitter_languages import get_language + + return get_language("perl") + + def get_chunk_query(self) -> str: + return CHUNK_QUERY + + def make_line_comment(self, text: str) -> str: + return f"# {text}" diff --git a/libs/community/langchain_community/document_loaders/parsers/language/python.py b/libs/community/langchain_community/document_loaders/parsers/language/python.py index ca810946bd4e7..52dbc68352a91 100644 --- a/libs/community/langchain_community/document_loaders/parsers/language/python.py +++ b/libs/community/langchain_community/document_loaders/parsers/language/python.py @@ -1,5 +1,5 @@ import ast -from typing import Any, List +from typing import Any, List, Tuple from langchain_community.document_loaders.parsers.language.code_segmenter import ( CodeSegmenter, @@ -39,13 +39,15 @@ def simplify_code(self) -> str: tree = ast.parse(self.code) simplified_lines = self.source_lines[:] + indices_to_del: List[Tuple[int, int]] = [] for node in ast.iter_child_nodes(tree): if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef)): - start = node.lineno - 1 + start, end = node.lineno - 1, node.end_lineno simplified_lines[start] = f"# Code for: {simplified_lines[start]}" + assert isinstance(end, int) + indices_to_del.append((start + 1, end)) - assert isinstance(node.end_lineno, int) - for line_num in range(start + 1, node.end_lineno): - simplified_lines[line_num] = None # type: ignore + for start, end in reversed(indices_to_del): + del simplified_lines[start + 0 : end] - return "\n".join(line for line in simplified_lines if line is not None) + return "\n".join(simplified_lines) diff --git 
a/libs/community/langchain_community/document_loaders/parsers/language/ruby.py b/libs/community/langchain_community/document_loaders/parsers/language/ruby.py new file mode 100644 index 0000000000000..767a1f94a4d37 --- /dev/null +++ b/libs/community/langchain_community/document_loaders/parsers/language/ruby.py @@ -0,0 +1,32 @@ +from typing import TYPE_CHECKING + +from langchain_community.document_loaders.parsers.language.tree_sitter_segmenter import ( # noqa: E501 + TreeSitterSegmenter, +) + +if TYPE_CHECKING: + from tree_sitter import Language + + +CHUNK_QUERY = """ + [ + (method) @method + (module) @module + (class) @class + ] +""".strip() + + +class RubySegmenter(TreeSitterSegmenter): + """Code segmenter for Ruby.""" + + def get_language(self) -> "Language": + from tree_sitter_languages import get_language + + return get_language("ruby") + + def get_chunk_query(self) -> str: + return CHUNK_QUERY + + def make_line_comment(self, text: str) -> str: + return f"# {text}" diff --git a/libs/community/langchain_community/document_loaders/parsers/language/rust.py b/libs/community/langchain_community/document_loaders/parsers/language/rust.py new file mode 100644 index 0000000000000..bb73f96bf6d7c --- /dev/null +++ b/libs/community/langchain_community/document_loaders/parsers/language/rust.py @@ -0,0 +1,34 @@ +from typing import TYPE_CHECKING + +from langchain_community.document_loaders.parsers.language.tree_sitter_segmenter import ( # noqa: E501 + TreeSitterSegmenter, +) + +if TYPE_CHECKING: + from tree_sitter import Language + + +CHUNK_QUERY = """ + [ + (function_item + name: (identifier) + body: (block)) @function + (struct_item) @struct + (trait_item) @trait + ] +""".strip() + + +class RustSegmenter(TreeSitterSegmenter): + """Code segmenter for Rust.""" + + def get_language(self) -> "Language": + from tree_sitter_languages import get_language + + return get_language("rust") + + def get_chunk_query(self) -> str: + return CHUNK_QUERY + + def make_line_comment(self, text: str) -> str: + return f"// {text}" diff --git a/libs/community/langchain_community/document_loaders/parsers/language/scala.py b/libs/community/langchain_community/document_loaders/parsers/language/scala.py new file mode 100644 index 0000000000000..af62a4e748fed --- /dev/null +++ b/libs/community/langchain_community/document_loaders/parsers/language/scala.py @@ -0,0 +1,33 @@ +from typing import TYPE_CHECKING + +from langchain_community.document_loaders.parsers.language.tree_sitter_segmenter import ( # noqa: E501 + TreeSitterSegmenter, +) + +if TYPE_CHECKING: + from tree_sitter import Language + + +CHUNK_QUERY = """ + [ + (class_definition) @class + (function_definition) @function + (object_definition) @object + (trait_definition) @trait + ] +""".strip() + + +class ScalaSegmenter(TreeSitterSegmenter): + """Code segmenter for Scala.""" + + def get_language(self) -> "Language": + from tree_sitter_languages import get_language + + return get_language("scala") + + def get_chunk_query(self) -> str: + return CHUNK_QUERY + + def make_line_comment(self, text: str) -> str: + return f"// {text}" diff --git a/libs/community/langchain_community/document_loaders/parsers/language/tree_sitter_segmenter.py b/libs/community/langchain_community/document_loaders/parsers/language/tree_sitter_segmenter.py new file mode 100644 index 0000000000000..7187cd6b5f528 --- /dev/null +++ b/libs/community/langchain_community/document_loaders/parsers/language/tree_sitter_segmenter.py @@ -0,0 +1,108 @@ +from abc import abstractmethod +from typing import 
TYPE_CHECKING, List + +from langchain_community.document_loaders.parsers.language.code_segmenter import ( + CodeSegmenter, +) + +if TYPE_CHECKING: + from tree_sitter import Language, Parser + + +class TreeSitterSegmenter(CodeSegmenter): + """Abstract class for `CodeSegmenter`s that use the tree-sitter library.""" + + def __init__(self, code: str): + super().__init__(code) + self.source_lines = self.code.splitlines() + + try: + import tree_sitter # noqa: F401 + import tree_sitter_languages # noqa: F401 + except ImportError: + raise ImportError( + "Could not import tree_sitter/tree_sitter_languages Python packages. " + "Please install them with " + "`pip install tree-sitter tree-sitter-languages`." + ) + + def is_valid(self) -> bool: + language = self.get_language() + error_query = language.query("(ERROR) @error") + + parser = self.get_parser() + tree = parser.parse(bytes(self.code, encoding="UTF-8")) + + return len(error_query.captures(tree.root_node)) == 0 + + def extract_functions_classes(self) -> List[str]: + language = self.get_language() + query = language.query(self.get_chunk_query()) + + parser = self.get_parser() + tree = parser.parse(bytes(self.code, encoding="UTF-8")) + captures = query.captures(tree.root_node) + + processed_lines = set() + chunks = [] + + for node, name in captures: + start_line = node.start_point[0] + end_line = node.end_point[0] + lines = list(range(start_line, end_line + 1)) + + if any(line in processed_lines for line in lines): + continue + + processed_lines.update(lines) + chunk_text = node.text.decode("UTF-8") + chunks.append(chunk_text) + + return chunks + + def simplify_code(self) -> str: + language = self.get_language() + query = language.query(self.get_chunk_query()) + + parser = self.get_parser() + tree = parser.parse(bytes(self.code, encoding="UTF-8")) + processed_lines = set() + + simplified_lines = self.source_lines[:] + for node, name in query.captures(tree.root_node): + start_line = node.start_point[0] + end_line = node.end_point[0] + + lines = list(range(start_line, end_line + 1)) + if any(line in processed_lines for line in lines): + continue + + simplified_lines[start_line] = self.make_line_comment( + f"Code for: {self.source_lines[start_line]}" + ) + + for line_num in range(start_line + 1, end_line + 1): + simplified_lines[line_num] = None # type: ignore + + processed_lines.update(lines) + + return "\n".join(line for line in simplified_lines if line is not None) + + def get_parser(self) -> "Parser": + from tree_sitter import Parser + + parser = Parser() + parser.set_language(self.get_language()) + return parser + + @abstractmethod + def get_language(self) -> "Language": + raise NotImplementedError() # pragma: no cover + + @abstractmethod + def get_chunk_query(self) -> str: + raise NotImplementedError() # pragma: no cover + + @abstractmethod + def make_line_comment(self, text: str) -> str: + raise NotImplementedError() # pragma: no cover diff --git a/libs/community/langchain_community/document_loaders/parsers/language/typescript.py b/libs/community/langchain_community/document_loaders/parsers/language/typescript.py new file mode 100644 index 0000000000000..ab7158e2e8210 --- /dev/null +++ b/libs/community/langchain_community/document_loaders/parsers/language/typescript.py @@ -0,0 +1,33 @@ +from typing import TYPE_CHECKING + +from langchain_community.document_loaders.parsers.language.tree_sitter_segmenter import ( # noqa: E501 + TreeSitterSegmenter, +) + +if TYPE_CHECKING: + from tree_sitter import Language + + +CHUNK_QUERY = """ + [ + 
(function_declaration) @function + (class_declaration) @class + (interface_declaration) @interface + (enum_declaration) @enum + ] +""".strip() + + +class TypeScriptSegmenter(TreeSitterSegmenter): + """Code segmenter for TypeScript.""" + + def get_language(self) -> "Language": + from tree_sitter_languages import get_language + + return get_language("typescript") + + def get_chunk_query(self) -> str: + return CHUNK_QUERY + + def make_line_comment(self, text: str) -> str: + return f"// {text}" diff --git a/libs/community/langchain_community/document_loaders/pdf.py b/libs/community/langchain_community/document_loaders/pdf.py index 60c2e7fe48226..ea08605411052 100644 --- a/libs/community/langchain_community/document_loaders/pdf.py +++ b/libs/community/langchain_community/document_loaders/pdf.py @@ -318,7 +318,7 @@ def load(self) -> List[Document]: output_string = StringIO() with open_filename(self.file_path, "rb") as fp: extract_text_to_fp( - fp, # type: ignore[arg-type] + fp, output_string, codec="", laparams=LAParams(), diff --git a/libs/community/langchain_community/document_loaders/pebblo.py b/libs/community/langchain_community/document_loaders/pebblo.py new file mode 100644 index 0000000000000..0875335ac1416 --- /dev/null +++ b/libs/community/langchain_community/document_loaders/pebblo.py @@ -0,0 +1,292 @@ +"""Pebblo's safe dataloader is a wrapper for document loaders""" + +import logging +import os +import uuid +from http import HTTPStatus +from typing import Any, Dict, Iterator, List + +import requests +from langchain_core.documents import Document + +from langchain_community.document_loaders.base import BaseLoader +from langchain_community.utilities.pebblo import ( + CLASSIFIER_URL, + PLUGIN_VERSION, + App, + Doc, + get_full_path, + get_loader_full_path, + get_loader_type, + get_runtime, +) + +logger = logging.getLogger(__name__) + + +class PebbloSafeLoader(BaseLoader): + """Pebblo Safe Loader class is a wrapper around document loaders enabling the data + to be scrutinized. + """ + + _discover_sent: bool = False + _loader_sent: bool = False + + def __init__( + self, + langchain_loader: BaseLoader, + name: str, + owner: str = "", + description: str = "", + ): + if not name or not isinstance(name, str): + raise NameError("Must specify a valid name.") + self.app_name = name + self.load_id = str(uuid.uuid4()) + self.loader = langchain_loader + self.owner = owner + self.description = description + self.source_path = get_loader_full_path(self.loader) + self.source_owner = PebbloSafeLoader.get_file_owner_from_path(self.source_path) + self.docs: List[Document] = [] + loader_name = str(type(self.loader)).split(".")[-1].split("'")[0] + self.source_type = get_loader_type(loader_name) + self.source_path_size = self.get_source_size(self.source_path) + self.source_aggr_size = 0 + self.loader_details = { + "loader": loader_name, + "source_path": self.source_path, + "source_type": self.source_type, + **( + {"source_path_size": str(self.source_path_size)} + if self.source_path_size > 0 + else {} + ), + } + # generate app + self.app = self._get_app_details() + self._send_discover() + + def load(self) -> List[Document]: + """Load Documents. + + Returns: + list: Documents fetched from load method of the wrapped `loader`. + """ + self.docs = self.loader.load() + self._send_loader_doc(loading_end=True) + return self.docs + + def lazy_load(self) -> Iterator[Document]: + """Load documents in lazy fashion. + + Raises: + NotImplementedError: raised when lazy_load id not implemented + within wrapped loader. 
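        Example (illustrative; any loader implementing ``lazy_load`` can be
        wrapped, and the app name, owner, and file path below are placeholders):

        .. code-block:: python

            from langchain_community.document_loaders.csv_loader import CSVLoader
            from langchain_community.document_loaders.pebblo import PebbloSafeLoader

            loader = PebbloSafeLoader(
                CSVLoader("data/customers.csv"),
                name="acme-rag-app",
                owner="Data Team",
                description="CSV loader wrapped for Pebblo inspection.",
            )
            for doc in loader.lazy_load():
                print(doc.metadata)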
+ + Yields: + list: Documents from loader's lazy loading. + """ + try: + doc_iterator = self.loader.lazy_load() + except NotImplementedError as exc: + err_str = f"{self.loader.__class__.__name__} does not implement lazy_load()" + logger.error(err_str) + raise NotImplementedError(err_str) from exc + while True: + try: + doc = next(doc_iterator) + except StopIteration: + self.docs = [] + self._send_loader_doc(loading_end=True) + break + self.docs = [ + doc, + ] + self._send_loader_doc() + yield doc + + @classmethod + def set_discover_sent(cls) -> None: + cls._discover_sent = True + + @classmethod + def set_loader_sent(cls) -> None: + cls._loader_sent = True + + def _send_loader_doc(self, loading_end: bool = False) -> None: + """Send documents fetched from loader to pebblo-server. Internal method. + + Args: + loading_end (bool, optional): Flag indicating the halt of data + loading by loader. Defaults to False. + """ + headers = {"Accept": "application/json", "Content-Type": "application/json"} + doc_content = [doc.dict() for doc in self.docs] + docs = [] + for doc in doc_content: + doc_source_path = get_full_path(doc.get("metadata", {}).get("source")) + doc_source_owner = PebbloSafeLoader.get_file_owner_from_path( + doc_source_path + ) + doc_source_size = self.get_source_size(doc_source_path) + page_content = str(doc.get("page_content")) + page_content_size = self.calculate_content_size(page_content) + self.source_aggr_size += page_content_size + docs.append( + { + "doc": page_content, + "source_path": doc_source_path, + "last_modified": doc.get("metadata", {}).get("last_modified"), + "file_owner": doc_source_owner, + **( + {"source_path_size": doc_source_size} + if doc_source_size is not None + else {} + ), + } + ) + payload: Dict[str, Any] = { + "name": self.app_name, + "owner": self.owner, + "docs": docs, + "plugin_version": PLUGIN_VERSION, + "load_id": self.load_id, + "loader_details": self.loader_details, + "loading_end": "false", + "source_owner": self.source_owner, + } + if loading_end is True: + payload["loading_end"] = "true" + if "loader_details" in payload: + payload["loader_details"]["source_aggr_size"] = self.source_aggr_size + payload = Doc(**payload).dict(exclude_unset=True) + load_doc_url = f"{CLASSIFIER_URL}/v1/loader/doc" + try: + resp = requests.post( + load_doc_url, headers=headers, json=payload, timeout=20 + ) + if resp.status_code not in [HTTPStatus.OK, HTTPStatus.BAD_GATEWAY]: + logger.warning( + f"Received unexpected HTTP response code: {resp.status_code}" + ) + logger.debug( + f"send_loader_doc: request \ + url {resp.request.url}, \ + body {str(resp.request.body)[:999]} \ + len {len(resp.request.body if resp.request.body else [])} \ + response status{resp.status_code} body {resp.json()}" + ) + except requests.exceptions.RequestException: + logger.warning("Unable to reach pebblo server.") + except Exception: + logger.warning("An Exception caught in _send_loader_doc.") + if loading_end is True: + PebbloSafeLoader.set_loader_sent() + + @staticmethod + def calculate_content_size(page_content: str) -> int: + """Calculate the content size in bytes: + - Encode the string to bytes using a specific encoding (e.g., UTF-8) + - Get the length of the encoded bytes. + + Args: + page_content (str): Data string. + + Returns: + int: Size of string in bytes. 
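        Example (the count is over encoded bytes, not characters):

        .. code-block:: python

            PebbloSafeLoader.calculate_content_size("héllo")  # == 6; "é" is 2 bytes in UTF-8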
+ """ + + # Encode the content to bytes using UTF-8 + encoded_content = page_content.encode("utf-8") + size = len(encoded_content) + return size + + def _send_discover(self) -> None: + """Send app discovery payload to pebblo-server. Internal method.""" + headers = {"Accept": "application/json", "Content-Type": "application/json"} + payload = self.app.dict(exclude_unset=True) + app_discover_url = f"{CLASSIFIER_URL}/v1/app/discover" + try: + resp = requests.post( + app_discover_url, headers=headers, json=payload, timeout=20 + ) + logger.debug( + f"send_discover: request \ + url {resp.request.url}, \ + headers {resp.request.headers}, \ + body {str(resp.request.body)[:999]} \ + len {len(resp.request.body if resp.request.body else [])} \ + response status{resp.status_code} body {resp.json()}" + ) + if resp.status_code in [HTTPStatus.OK, HTTPStatus.BAD_GATEWAY]: + PebbloSafeLoader.set_discover_sent() + else: + logger.warning( + f"Received unexpected HTTP response code: {resp.status_code}" + ) + except requests.exceptions.RequestException: + logger.warning("Unable to reach pebblo server.") + except Exception: + logger.warning("An Exception caught in _send_discover.") + + def _get_app_details(self) -> App: + """Fetch app details. Internal method. + + Returns: + App: App details. + """ + framework, runtime = get_runtime() + app = App( + name=self.app_name, + owner=self.owner, + description=self.description, + load_id=self.load_id, + runtime=runtime, + framework=framework, + plugin_version=PLUGIN_VERSION, + ) + return app + + @staticmethod + def get_file_owner_from_path(file_path: str) -> str: + """Fetch owner of local file path. + + Args: + file_path (str): Local file path. + + Returns: + str: Name of owner. + """ + try: + import pwd + + file_owner_uid = os.stat(file_path).st_uid + file_owner_name = pwd.getpwuid(file_owner_uid).pw_name + except Exception: + file_owner_name = "unknown" + return file_owner_name + + def get_source_size(self, source_path: str) -> int: + """Fetch size of source path. Source can be a directory or a file. + + Args: + source_path (str): Local path of data source. + + Returns: + int: Source size in bytes. + """ + if not source_path: + return 0 + size = 0 + if os.path.isfile(source_path): + size = os.path.getsize(source_path) + elif os.path.isdir(source_path): + total_size = 0 + for dirpath, _, filenames in os.walk(source_path): + for f in filenames: + fp = os.path.join(dirpath, f) + if not os.path.islink(fp): + total_size += os.path.getsize(fp) + size = total_size + return size diff --git a/libs/community/langchain_community/document_loaders/recursive_url_loader.py b/libs/community/langchain_community/document_loaders/recursive_url_loader.py index c24ab1730fd35..08887659841da 100644 --- a/libs/community/langchain_community/document_loaders/recursive_url_loader.py +++ b/libs/community/langchain_community/document_loaders/recursive_url_loader.py @@ -215,6 +215,11 @@ async def _async_get_child_links_recursive( visited: A set of visited URLs. depth: To reach the current url, how many pages have been visited. 
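        Example (illustrative; URL and depth are placeholders): this async code
        path is only reachable when the loader was constructed with
        ``use_async=True``, e.g.:

        .. code-block:: python

            from langchain_community.document_loaders import RecursiveUrlLoader

            loader = RecursiveUrlLoader(
                "https://docs.python.org/3.9/", max_depth=2, use_async=True
            )
            docs = loader.load()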
""" + if not self.use_async or not self._lock: + raise ValueError( + "Async functions forbidden when not initialized with `use_async`" + ) + try: import aiohttp except ImportError: @@ -237,7 +242,7 @@ async def _async_get_child_links_recursive( headers=self.headers, ) ) - async with self._lock: # type: ignore + async with self._lock: visited.add(url) try: async with session.get(url) as response: @@ -277,7 +282,7 @@ async def _async_get_child_links_recursive( # Recursively call the function to get the children of the children sub_tasks = [] - async with self._lock: # type: ignore + async with self._lock: to_visit = set(sub_links).difference(visited) for link in to_visit: sub_tasks.append( diff --git a/libs/community/langchain_community/document_loaders/url_playwright.py b/libs/community/langchain_community/document_loaders/url_playwright.py index 8071d3717f726..106f15cee6f61 100644 --- a/libs/community/langchain_community/document_loaders/url_playwright.py +++ b/libs/community/langchain_community/document_loaders/url_playwright.py @@ -2,7 +2,7 @@ """ import logging from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, Dict, List, Optional from langchain_core.documents import Document @@ -111,6 +111,22 @@ class PlaywrightURLLoader(BaseLoader): urls (List[str]): List of URLs to load. continue_on_failure (bool): If True, continue loading other URLs on failure. headless (bool): If True, the browser will run in headless mode. + proxy (Optional[Dict[str, str]]): If set, the browser will access URLs + through the specified proxy. + + Example: + .. code-block:: python + + from langchain_community.document_loaders import PlaywrightURLLoader + + urls = ["https://api.ipify.org/?format=json",] + proxy={ + "server": "https://xx.xx.xx:15818", # https://: + "username": "username", + "password": "password" + } + loader = PlaywrightURLLoader(urls, proxy=proxy) + data = loader.load() """ def __init__( @@ -120,6 +136,7 @@ def __init__( headless: bool = True, remove_selectors: Optional[List[str]] = None, evaluator: Optional[PlaywrightEvaluator] = None, + proxy: Optional[Dict[str, str]] = None, ): """Load a list of URLs using Playwright.""" try: @@ -133,6 +150,7 @@ def __init__( self.urls = urls self.continue_on_failure = continue_on_failure self.headless = headless + self.proxy = proxy if remove_selectors and evaluator: raise ValueError( @@ -153,7 +171,7 @@ def load(self) -> List[Document]: docs: List[Document] = list() with sync_playwright() as p: - browser = p.chromium.launch(headless=self.headless) + browser = p.chromium.launch(headless=self.headless, proxy=self.proxy) for url in self.urls: try: page = browser.new_page() @@ -186,7 +204,7 @@ async def aload(self) -> List[Document]: docs: List[Document] = list() async with async_playwright() as p: - browser = await p.chromium.launch(headless=self.headless) + browser = await p.chromium.launch(headless=self.headless, proxy=self.proxy) for url in self.urls: try: page = await browser.new_page() diff --git a/libs/community/langchain_community/embeddings/__init__.py b/libs/community/langchain_community/embeddings/__init__.py index 8c50c3f1ab79d..4d9565c60f7cb 100644 --- a/libs/community/langchain_community/embeddings/__init__.py +++ b/libs/community/langchain_community/embeddings/__init__.py @@ -65,11 +65,13 @@ from langchain_community.embeddings.mlflow_gateway import MlflowAIGatewayEmbeddings from langchain_community.embeddings.modelscope_hub import ModelScopeEmbeddings from 
langchain_community.embeddings.mosaicml import MosaicMLInstructorEmbeddings +from langchain_community.embeddings.nemo import NeMoEmbeddings from langchain_community.embeddings.nlpcloud import NLPCloudEmbeddings from langchain_community.embeddings.oci_generative_ai import OCIGenAIEmbeddings from langchain_community.embeddings.octoai_embeddings import OctoAIEmbeddings from langchain_community.embeddings.ollama import OllamaEmbeddings from langchain_community.embeddings.openai import OpenAIEmbeddings +from langchain_community.embeddings.optimum_intel import QuantizedBiEncoderEmbeddings from langchain_community.embeddings.sagemaker_endpoint import ( SagemakerEndpointEmbeddings, ) @@ -148,6 +150,8 @@ "BookendEmbeddings", "VolcanoEmbeddings", "OCIGenAIEmbeddings", + "QuantizedBiEncoderEmbeddings", + "NeMoEmbeddings", ] diff --git a/libs/community/langchain_community/embeddings/cohere.py b/libs/community/langchain_community/embeddings/cohere.py index fd95b58ea6ce8..2d4676d1254d8 100644 --- a/libs/community/langchain_community/embeddings/cohere.py +++ b/libs/community/langchain_community/embeddings/cohere.py @@ -34,7 +34,7 @@ class CohereEmbeddings(BaseModel, Embeddings): cohere_api_key: Optional[str] = None - max_retries: Optional[int] = None + max_retries: Optional[int] = 3 """Maximum number of retries to make when generating.""" request_timeout: Optional[float] = None """Timeout in seconds for the Cohere API request.""" @@ -92,11 +92,13 @@ def embed( async def aembed( self, texts: List[str], *, input_type: Optional[str] = None ) -> List[List[float]]: - embeddings = await self.async_client.embed( - model=self.model, - texts=texts, - input_type=input_type, - truncate=self.truncate, + embeddings = ( + await self.async_client.embed( + model=self.model, + texts=texts, + input_type=input_type, + truncate=self.truncate, + ) ).embeddings return [list(map(float, e)) for e in embeddings] diff --git a/libs/community/langchain_community/embeddings/infinity.py b/libs/community/langchain_community/embeddings/infinity.py index 4cc5688e0e1e0..f0a01fc75e512 100644 --- a/libs/community/langchain_community/embeddings/infinity.py +++ b/libs/community/langchain_community/embeddings/infinity.py @@ -28,14 +28,14 @@ class InfinityEmbeddings(BaseModel, Embeddings): from langchain_community.embeddings import InfinityEmbeddings InfinityEmbeddings( model="BAAI/bge-small", - infinity_api_url="http://localhost:7797/v1", + infinity_api_url="http://localhost:7997", ) """ model: str "Underlying Infinity model id." - infinity_api_url: str = "http://localhost:7797/v1" + infinity_api_url: str = "http://localhost:7997" """Endpoint URL to use.""" client: Any = None #: :meta private: diff --git a/libs/community/langchain_community/embeddings/nemo.py b/libs/community/langchain_community/embeddings/nemo.py new file mode 100644 index 0000000000000..7ae0ab5da6331 --- /dev/null +++ b/libs/community/langchain_community/embeddings/nemo.py @@ -0,0 +1,169 @@ +from __future__ import annotations + +import asyncio +import json +from typing import Any, Dict, List, Optional + +import aiohttp +import requests +from langchain_core.embeddings import Embeddings +from langchain_core.pydantic_v1 import BaseModel, root_validator + + +def is_endpoint_live(url: str, headers: Optional[dict], payload: Any) -> bool: + """ + Check if an endpoint is live by sending a GET request to the specified URL. + + Args: + url (str): The URL of the endpoint to check. + + Returns: + bool: True if the endpoint is live (status code 200), False otherwise. 
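    Example (illustrative; mirrors the probe payload used by
    ``NeMoEmbeddings.validate_environment`` below, against a locally running
    endpoint):

    .. code-block:: python

        import json

        is_endpoint_live(
            url="http://localhost:8088/v1/embeddings",
            headers={"Content-Type": "application/json"},
            payload=json.dumps(
                {"input": "Hello World", "model": "NV-Embed-QA-003", "input_type": "query"}
            ),
        )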
+
+    Raises:
+        Exception: If the endpoint returns a non-successful status code or if there is
+            an error querying the endpoint.
+    """
+    try:
+        response = requests.request("POST", url, headers=headers, data=payload)
+
+        # Check if the status code is 200 (OK)
+        if response.status_code == 200:
+            return True
+        else:
+            # Raise an exception if the status code is not 200
+            raise Exception(
+                f"Endpoint returned a non-successful status code: "
+                f"{response.status_code}"
+            )
+    except requests.exceptions.RequestException as e:
+        # Handle any exceptions (e.g., connection errors)
+        raise Exception(f"Error querying the endpoint: {e}")
+
+
+class NeMoEmbeddings(BaseModel, Embeddings):
+    batch_size: int = 16
+    model: str = "NV-Embed-QA-003"
+    api_endpoint_url: str = "http://localhost:8088/v1/embeddings"
+
+    @root_validator()
+    def validate_environment(cls, values: Dict) -> Dict:
+        """Validate that the endpoint is alive using the values that are provided."""
+
+        url = values["api_endpoint_url"]
+        model = values["model"]
+
+        # Optional: A minimal test payload and headers required by the endpoint
+        headers = {"Content-Type": "application/json"}
+        payload = json.dumps(
+            {"input": "Hello World", "model": model, "input_type": "query"}
+        )
+
+        is_endpoint_live(url, headers, payload)
+
+        return values
+
+    async def _aembedding_func(
+        self, session: Any, text: str, input_type: str
+    ) -> List[float]:
+        """Async call out to the NeMo embedding endpoint.
+
+        Args:
+            text: The text to embed.
+
+        Returns:
+            Embeddings for the text.
+        """
+
+        headers = {"Content-Type": "application/json"}
+
+        async with session.post(
+            self.api_endpoint_url,
+            json={"input": text, "model": self.model, "input_type": input_type},
+            headers=headers,
+        ) as response:
+            response.raise_for_status()
+            answer = await response.text()
+            answer = json.loads(answer)
+            return answer["data"][0]["embedding"]
+
+    def _embedding_func(self, text: str, input_type: str) -> List[float]:
+        """Call out to the NeMo embedding endpoint.
+
+        Args:
+            text: The text to embed.
+
+        Returns:
+            Embeddings for the text.
+        """
+
+        payload = json.dumps(
+            {"input": text, "model": self.model, "input_type": input_type}
+        )
+        headers = {"Content-Type": "application/json"}
+
+        response = requests.request(
+            "POST", self.api_endpoint_url, headers=headers, data=payload
+        )
+        response_json = json.loads(response.text)
+        embedding = response_json["data"][0]["embedding"]
+
+        return embedding
+
+    def embed_documents(self, documents: List[str]) -> List[List[float]]:
+        """Embed a list of document texts.
+
+        Args:
+            documents: The list of texts to embed.
+
+        Returns:
+            List of embeddings, one for each text.
+        """
+        return [self._embedding_func(text, input_type="passage") for text in documents]
+
+    def embed_query(self, text: str) -> List[float]:
+        return self._embedding_func(text, input_type="query")
+
+    async def aembed_query(self, text: str) -> List[float]:
+        """Call out to NeMo's embedding endpoint async for embedding query text.
+
+        Args:
+            text: The text to embed.
+
+        Returns:
+            Embedding for the text.
+        """
+
+        async with aiohttp.ClientSession() as session:
+            embedding = await self._aembedding_func(session, text, "query")
+            return embedding
+
+    async def aembed_documents(self, texts: List[str]) -> List[List[float]]:
+        """Call out to NeMo's embedding endpoint async for embedding search docs.
+
+        Args:
+            texts: The list of texts to embed.
+
+        Returns:
+            List of embeddings, one for each text.
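        Example (illustrative; assumes a NeMo Retriever embedding microservice
        is reachable at the default endpoint, since construction pings it):

        .. code-block:: python

            import asyncio

            from langchain_community.embeddings import NeMoEmbeddings

            embedder = NeMoEmbeddings(
                batch_size=16,
                model="NV-Embed-QA-003",
                api_endpoint_url="http://localhost:8088/v1/embeddings",
            )
            vectors = asyncio.run(
                embedder.aembed_documents(["Paris is the capital of France."])
            )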
+ """ + embeddings = [] + + async with aiohttp.ClientSession() as session: + for batch in range(0, len(texts), self.batch_size): + text_batch = texts[batch : batch + self.batch_size] + + for text in text_batch: + # Create tasks for all texts in the batch + tasks = [ + self._aembedding_func(session, text, "passage") + for text in text_batch + ] + + # Run all tasks concurrently + batch_results = await asyncio.gather(*tasks) + + # Extend the embeddings list with results from this batch + embeddings.extend(batch_results) + + return embeddings diff --git a/libs/community/langchain_community/embeddings/optimum_intel.py b/libs/community/langchain_community/embeddings/optimum_intel.py new file mode 100644 index 0000000000000..56d4a6f20ac6f --- /dev/null +++ b/libs/community/langchain_community/embeddings/optimum_intel.py @@ -0,0 +1,208 @@ +from typing import Any, Dict, List, Optional + +from langchain_core.embeddings import Embeddings +from langchain_core.pydantic_v1 import BaseModel, Extra + + +class QuantizedBiEncoderEmbeddings(BaseModel, Embeddings): + """Quantized bi-encoders embedding models. + + Please ensure that you have installed optimum-intel and ipex. + + Input: + model_name: str = Model name. + max_seq_len: int = The maximum sequence length for tokenization. (default 512) + pooling_strategy: str = + "mean" or "cls", pooling strategy for the final layer. (default "mean") + query_instruction: Optional[str] = + An instruction to add to the query before embedding. (default None) + document_instruction: Optional[str] = + An instruction to add to each document before embedding. (default None) + padding: Optional[bool] = + Whether to add padding during tokenization or not. (default True) + model_kwargs: Optional[Dict] = + Parameters to add to the model during initialization. (default {}) + encode_kwargs: Optional[Dict] = + Parameters to add during the embedding forward pass. (default {}) + + Example: + + from langchain_community.embeddings import QuantizedBiEncoderEmbeddings + + model_name = "Intel/bge-small-en-v1.5-rag-int8-static" + encode_kwargs = {'normalize_embeddings': True} + hf = QuantizedBiEncoderEmbeddings( + model_name, + encode_kwargs=encode_kwargs, + query_instruction="Represent this sentence for searching relevant passages: " + ) + """ + + def __init__( + self, + model_name: str, + max_seq_len: int = 512, + pooling_strategy: str = "mean", # "mean" or "cls" + query_instruction: Optional[str] = None, + document_instruction: Optional[str] = None, + padding: bool = True, + model_kwargs: Optional[Dict] = None, + encode_kwargs: Optional[Dict] = None, + **kwargs: Any, + ) -> None: + super().__init__(**kwargs) + self.model_name_or_path = model_name + self.max_seq_len = max_seq_len + self.pooling = pooling_strategy + self.padding = padding + self.encode_kwargs = encode_kwargs or {} + self.model_kwargs = model_kwargs or {} + + self.normalize = self.encode_kwargs.get("normalize_embeddings", False) + self.batch_size = self.encode_kwargs.get("batch_size", 32) + + self.query_instruction = query_instruction + self.document_instruction = document_instruction + + self.load_model() + + def load_model(self) -> None: + try: + from transformers import AutoTokenizer + except ImportError as e: + raise ImportError( + "Unable to import transformers, please install with " + "`pip install -U transformers`." 
+ ) from e + try: + from optimum.intel import IPEXModel + + self.transformer_model = IPEXModel.from_pretrained( + self.model_name_or_path, **self.model_kwargs + ) + except Exception as e: + raise Exception( + f""" +Failed to load model {self.model_name_or_path}, due to the following error: +{e} +Please ensure that you have installed optimum-intel and ipex correctly,using: + +pip install optimum[neural-compressor] +pip install intel_extension_for_pytorch + +For more information, please visit: +* Install optimum-intel as shown here: https://github.com/huggingface/optimum-intel. +* Install IPEX as shown here: https://intel.github.io/intel-extension-for-pytorch/index.html#installation?platform=cpu&version=v2.2.0%2Bcpu. +""" + ) + self.transformer_tokenizer = AutoTokenizer.from_pretrained( + pretrained_model_name_or_path=self.model_name_or_path, + ) + self.transformer_model.eval() + + class Config: + """Configuration for this pydantic object.""" + + extra = Extra.allow + + def _embed(self, inputs: Any) -> Any: + try: + import torch + except ImportError as e: + raise ImportError( + "Unable to import torch, please install with `pip install -U torch`." + ) from e + with torch.inference_mode(): + outputs = self.transformer_model(**inputs) + if self.pooling == "mean": + emb = self._mean_pooling(outputs, inputs["attention_mask"]) + elif self.pooling == "cls": + emb = self._cls_pooling(outputs) + else: + raise ValueError("pooling method no supported") + + if self.normalize: + emb = torch.nn.functional.normalize(emb, p=2, dim=1) + return emb + + @staticmethod + def _cls_pooling(outputs: Any) -> Any: + if isinstance(outputs, dict): + token_embeddings = outputs["last_hidden_state"] + else: + token_embeddings = outputs[0] + return token_embeddings[:, 0] + + @staticmethod + def _mean_pooling(outputs: Any, attention_mask: Any) -> Any: + try: + import torch + except ImportError as e: + raise ImportError( + "Unable to import torch, please install with `pip install -U torch`." + ) from e + if isinstance(outputs, dict): + token_embeddings = outputs["last_hidden_state"] + else: + # First element of model_output contains all token embeddings + token_embeddings = outputs[0] + input_mask_expanded = ( + attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float() + ) + sum_embeddings = torch.sum(token_embeddings * input_mask_expanded, 1) + sum_mask = torch.clamp(input_mask_expanded.sum(1), min=1e-9) + return sum_embeddings / sum_mask + + def _embed_text(self, texts: List[str]) -> List[List[float]]: + inputs = self.transformer_tokenizer( + texts, + max_length=self.max_seq_len, + truncation=True, + padding=self.padding, + return_tensors="pt", + ) + return self._embed(inputs).tolist() + + def embed_documents(self, texts: List[str]) -> List[List[float]]: + """Embed a list of text documents using the Optimized Embedder model. + + Input: + texts: List[str] = List of text documents to embed. + Output: + List[List[float]] = The embeddings of each text document. + """ + try: + import pandas as pd + except ImportError as e: + raise ImportError( + "Unable to import pandas, please install with `pip install -U pandas`." + ) from e + try: + import tqdm + except ImportError as e: + raise ImportError( + "Unable to import tqdm, please install with `pip install -U tqdm`." 
+ ) from e + docs = [ + self.document_instruction + d if self.document_instruction else d + for d in texts + ] + + # group into batches + text_list_df = pd.DataFrame(docs, columns=["texts"]).reset_index() + + # assign each example with its batch + text_list_df["batch_index"] = text_list_df["index"] // self.batch_size + + # create groups + batches = list(text_list_df.groupby(["batch_index"])["texts"].apply(list)) + + vectors = [] + for batch in tqdm(batches, desc="Batches"): + vectors += self._embed_text(batch) + return vectors + + def embed_query(self, text: str) -> List[float]: + if self.query_instruction: + text = self.query_instruction + text + return self._embed_text([text])[0] diff --git a/libs/community/langchain_community/embeddings/voyageai.py b/libs/community/langchain_community/embeddings/voyageai.py index 93109d45c65b6..f8b1a4059e6d5 100644 --- a/libs/community/langchain_community/embeddings/voyageai.py +++ b/libs/community/langchain_community/embeddings/voyageai.py @@ -86,6 +86,15 @@ class VoyageEmbeddings(BaseModel, Embeddings): show_progress_bar: bool = False """Whether to show a progress bar when embedding. Must have tqdm installed if set to True.""" + truncation: Optional[bool] = None + """Whether to truncate the input texts to fit within the context length. + + If True, over-length input texts will be truncated to fit within the context + length, before vectorized by the embedding model. If False, an error will be + raised if any given text exceeds the context length. If not specified + (defaults to None), we will truncate the input text before sending it to the + embedding model if it slightly exceeds the context window length. If it + significantly exceeds the context window length, an error will be raised.""" class Config: """Configuration for this pydantic object.""" @@ -104,12 +113,14 @@ def _invocation_params( self, input: List[str], input_type: Optional[str] = None ) -> Dict: api_key = cast(SecretStr, self.voyage_api_key).get_secret_value() - params = { + params: Dict = { "url": self.voyage_api_base, "headers": {"Authorization": f"Bearer {api_key}"}, "json": {"model": self.model, "input": input, "input_type": input_type}, "timeout": self.request_timeout, } + if self.truncation is not None: + params["json"]["truncation"] = self.truncation return params def _get_embeddings( diff --git a/libs/community/langchain_community/graphs/__init__.py b/libs/community/langchain_community/graphs/__init__.py index bd15f6465d1b4..c1fc640c4b1e7 100644 --- a/libs/community/langchain_community/graphs/__init__.py +++ b/libs/community/langchain_community/graphs/__init__.py @@ -8,6 +8,7 @@ from langchain_community.graphs.nebula_graph import NebulaGraph from langchain_community.graphs.neo4j_graph import Neo4jGraph from langchain_community.graphs.neptune_graph import NeptuneGraph +from langchain_community.graphs.neptune_rdf_graph import NeptuneRdfGraph from langchain_community.graphs.networkx_graph import NetworkxEntityGraph from langchain_community.graphs.ontotext_graphdb_graph import OntotextGraphDBGraph from langchain_community.graphs.rdf_graph import RdfGraph @@ -19,6 +20,7 @@ "Neo4jGraph", "NebulaGraph", "NeptuneGraph", + "NeptuneRdfGraph", "KuzuGraph", "HugeGraph", "RdfGraph", diff --git a/libs/community/langchain_community/graphs/kuzu_graph.py b/libs/community/langchain_community/graphs/kuzu_graph.py index eda7417f94064..1f99f49fc9435 100644 --- a/libs/community/langchain_community/graphs/kuzu_graph.py +++ b/libs/community/langchain_community/graphs/kuzu_graph.py @@ -36,10 +36,7 @@ 
def get_schema(self) -> str: def query(self, query: str, params: dict = {}) -> List[Dict[str, Any]]: """Query Kรนzu database""" - params_list = [] - for param_name in params: - params_list.append([param_name, params[param_name]]) - result = self.conn.execute(query, params_list) + result = self.conn.execute(query, params) column_names = result.get_column_names() return_list = [] while result.has_next(): @@ -79,20 +76,16 @@ def refresh_schema(self) -> None: rel_properties = [] for table in rel_tables: - current_table_schema = {"properties": [], "label": table["name"]} - properties_text = self.conn._connection.get_rel_property_names( - table["name"] - ).split("\n") - for i, line in enumerate(properties_text): - # The first 3 lines defines src, dst and name, so we skip them - if i < 3: - continue - if not line: - continue - property_name, property_type = line.strip().split(" ") - current_table_schema["properties"].append( - (property_name, property_type) - ) + table_name = table["name"] + current_table_schema = {"properties": [], "label": table_name} + query_result = self.conn.execute( + f"CALL table_info('{table_name}') RETURN *;" + ) + while query_result.has_next(): + row = query_result.get_next() + prop_name = row[1] + prop_type = row[2] + current_table_schema["properties"].append((prop_name, prop_type)) rel_properties.append(current_table_schema) self.schema = ( diff --git a/libs/community/langchain_community/graphs/neptune_rdf_graph.py b/libs/community/langchain_community/graphs/neptune_rdf_graph.py new file mode 100644 index 0000000000000..b9e0074a366ee --- /dev/null +++ b/libs/community/langchain_community/graphs/neptune_rdf_graph.py @@ -0,0 +1,256 @@ +import json +from types import SimpleNamespace +from typing import Any, Dict, Optional, Sequence + +import requests + +CLASS_QUERY = """ +SELECT DISTINCT ?elem ?com +WHERE { + ?instance a ?elem . + OPTIONAL { ?instance rdf:type/rdfs:subClassOf* ?elem } . + #FILTER (isIRI(?elem)) . + OPTIONAL { ?elem rdfs:comment ?com filter (lang(?com) = "en")} +} +""" + +REL_QUERY = """ +SELECT DISTINCT ?elem ?com +WHERE { + ?subj ?elem ?obj . + OPTIONAL { + ?elem rdf:type/rdfs:subPropertyOf* ?proptype . + VALUES ?proptype { rdf:Property owl:DatatypeProperty owl:ObjectProperty } . + } . + OPTIONAL { ?elem rdfs:comment ?com filter (lang(?com) = "en")} +} +""" + +DTPROP_QUERY = """ +SELECT DISTINCT ?elem ?com +WHERE { + ?subj ?elem ?obj . + OPTIONAL { + ?elem rdf:type/rdfs:subPropertyOf* ?proptype . + ?proptype a owl:DatatypeProperty . + } . + OPTIONAL { ?elem rdfs:comment ?com filter (lang(?com) = "en")} +} +""" + +OPROP_QUERY = """ +SELECT DISTINCT ?elem ?com +WHERE { + ?subj ?elem ?obj . + OPTIONAL { + ?elem rdf:type/rdfs:subPropertyOf* ?proptype . + ?proptype a owl:ObjectProperty . + } . + OPTIONAL { ?elem rdfs:comment ?com filter (lang(?com) = "en")} +} +""" + +ELEM_TYPES = { + "classes": CLASS_QUERY, + "rels": REL_QUERY, + "dtprops": DTPROP_QUERY, + "oprops": OPROP_QUERY, +} + + +class NeptuneRdfGraph: + """Neptune wrapper for RDF graph operations. + + Args: + host: SPARQL endpoint host for Neptune + port: SPARQL endpoint port for Neptune. Defaults 8182. + use_iam_auth: boolean indicating IAM auth is enabled in Neptune cluster + region_name: AWS region required if use_iam_auth is True, e.g., us-west-2 + hide_comments: whether to include ontology comments in schema for prompt + + Example: + .. 
code-block:: python + + graph = NeptuneRdfGraph( + host=', + port=, + use_iam_auth=False + ) + schema = graph.get_schema() + + OR + graph = NeptuneRdfGraph( + host=', + port=, + use_iam_auth=False + ) + schema_elem = graph.get_schema_elements() + ... change schema_elements ... + graph.load_schema(schema_elem) + schema = graph.get_schema() + + *Security note*: Make sure that the database connection uses credentials + that are narrowly-scoped to only include necessary permissions. + Failure to do so may result in data corruption or loss, since the calling + code may attempt commands that would result in deletion, mutation + of data if appropriately prompted or reading sensitive data if such + data is present in the database. + The best way to guard against such negative outcomes is to (as appropriate) + limit the permissions granted to the credentials used with this tool. + + See https://python.langchain.com/docs/security for more information. + """ + + def __init__( + self, + host: str, + port: int = 8182, + use_iam_auth: bool = False, + region_name: Optional[str] = None, + hide_comments: bool = False, + ) -> None: + self.use_iam_auth = use_iam_auth + self.region_name = region_name + self.hide_comments = hide_comments + self.query_endpoint = f"https://{host}:{port}/sparql" + + if self.use_iam_auth: + try: + import boto3 + + self.session = boto3.Session() + except ImportError: + raise ImportError( + "Could not import boto3 python package. " + "Please install it with `pip install boto3`." + ) + else: + self.session = None + + # Set schema + self.schema = "" + self.schema_elements: Dict[str, Any] = {} + self._refresh_schema() + + @property + def get_schema(self) -> str: + """ + Returns the schema of the graph database. + """ + return self.schema + + @property + def get_schema_elements(self) -> Dict[str, Any]: + return self.schema_elements + + def query( + self, + query: str, + ) -> Dict[str, Any]: + """ + Run Neptune query. + """ + request_data = {"query": query} + data = request_data + request_hdr = None + + if self.use_iam_auth: + credentials = self.session.get_credentials() + credentials = credentials.get_frozen_credentials() + access_key = credentials.access_key + secret_key = credentials.secret_key + service = "neptune-db" + session_token = credentials.token + params = None + creds = SimpleNamespace( + access_key=access_key, + secret_key=secret_key, + token=session_token, + region=self.region_name, + ) + from botocore.awsrequest import AWSRequest + + request = AWSRequest( + method="POST", url=self.query_endpoint, data=data, params=params + ) + from botocore.auth import SigV4Auth + + SigV4Auth(creds, service, self.region_name).add_auth(request) + request.headers["Content-Type"] = "application/x-www-form-urlencoded" + request_hdr = request.headers + else: + request_hdr = {} + request_hdr["Content-Type"] = "application/x-www-form-urlencoded" + + queryres = requests.request( + method="POST", url=self.query_endpoint, headers=request_hdr, data=data + ) + json_resp = json.loads(queryres.text) + return json_resp + + def load_schema(self, schema_elements: Dict[str, Any]) -> None: + """ + Generates and sets schema from schema_elements. Helpful in + cases where introspected schema needs pruning. 
+ """ + + elem_str = {} + for elem in ELEM_TYPES: + res_list = [] + for elem_rec in self.schema_elements[elem]: + uri = elem_rec["uri"] + local = elem_rec["local"] + res_str = f"<{uri}> ({local})" + if self.hide_comments is False: + res_str = res_str + f", {elem_rec['comment']}" + res_list.append(res_str) + elem_str[elem] = ", ".join(res_list) + + self.schema = ( + "In the following, each IRI is followed by the local name and " + "optionally its description in parentheses. \n" + "The graph supports the following node types:\n" + f"{elem_str['classes']}" + "The graph supports the following relationships:\n" + f"{elem_str['rels']}" + "The graph supports the following OWL object properties, " + f"{elem_str['dtprops']}" + "The graph supports the following OWL data properties, " + f"{elem_str['oprops']}" + ) + + def _get_local_name(self, iri: str) -> Sequence[str]: + """ + Split IRI into prefix and local + """ + if "#" in iri: + tokens = iri.split("#") + return [f"{tokens[0]}#", tokens[-1]] + elif "/" in iri: + tokens = iri.split("/") + return [f"{'/'.join(tokens[0:len(tokens)-1])}/", tokens[-1]] + else: + raise ValueError(f"Unexpected IRI '{iri}', contains neither '#' nor '/'.") + + def _refresh_schema(self) -> None: + """ + Query Neptune to introspect schema. + """ + self.schema_elements["distinct_prefixes"] = {} + + for elem in ELEM_TYPES: + items = self.query(ELEM_TYPES[elem]) + reslist = [] + for r in items["results"]["bindings"]: + uri = r["elem"]["value"] + tokens = self._get_local_name(uri) + elem_record = {"uri": uri, "local": tokens[1]} + if not self.hide_comments: + elem_record["comment"] = r["com"]["value"] if "com" in r else "" + reslist.append(elem_record) + if tokens[0] not in self.schema_elements["distinct_prefixes"]: + self.schema_elements["distinct_prefixes"][tokens[0]] = "y" + + self.schema_elements[elem] = reslist + + self.load_schema(self.schema_elements) diff --git a/libs/community/langchain_community/graphs/networkx_graph.py b/libs/community/langchain_community/graphs/networkx_graph.py index 81b7862fab25f..ccc0bdad664f2 100644 --- a/libs/community/langchain_community/graphs/networkx_graph.py +++ b/libs/community/langchain_community/graphs/networkx_graph.py @@ -135,6 +135,14 @@ def clear(self) -> None: """Clear the graph.""" self._graph.clear() + def clear_edges(self) -> None: + """Clear the graph edges.""" + self._graph.clear_edges() + + def get_number_of_nodes(self) -> int: + """Get number of nodes in the graph.""" + return self._graph.number_of_nodes() + def get_topological_sort(self) -> List[str]: """Get a list of entity names in the graph sorted by causal dependence.""" import networkx as nx diff --git a/libs/community/langchain_community/llms/__init__.py b/libs/community/langchain_community/llms/__init__.py index 2ccf4199c41a7..9adaf0af1be14 100644 --- a/libs/community/langchain_community/llms/__init__.py +++ b/libs/community/langchain_community/llms/__init__.py @@ -570,6 +570,12 @@ def _import_yandex_gpt() -> Any: return YandexGPT +def _import_yuan2() -> Any: + from langchain_community.llms.yuan2 import Yuan2 + + return Yuan2 + + def _import_volcengine_maas() -> Any: from langchain_community.llms.volcengine_maas import VolcEngineMaasLLM @@ -753,6 +759,8 @@ def __getattr__(name: str) -> Any: return _import_xinference() elif name == "YandexGPT": return _import_yandex_gpt() + elif name == "Yuan2": + return _import_yuan2() elif name == "VolcEngineMaasLLM": return _import_volcengine_maas() elif name == "type_to_cls_dict": @@ -851,6 +859,7 @@ def __getattr__(name: str) -> 
Any: "JavelinAIGateway", "QianfanLLMEndpoint", "YandexGPT", + "Yuan2", "VolcEngineMaasLLM", ] @@ -939,5 +948,6 @@ def get_type_to_cls_dict() -> Dict[str, Callable[[], Type[BaseLLM]]]: "javelin-ai-gateway": _import_javelin_ai_gateway, "qianfan_endpoint": _import_baidu_qianfan_endpoint, "yandex_gpt": _import_yandex_gpt, + "yuan2": _import_yuan2, "VolcEngineMaasLLM": _import_volcengine_maas, } diff --git a/libs/community/langchain_community/llms/anthropic.py b/libs/community/langchain_community/llms/anthropic.py index be832cf1368c2..f5674a0766762 100644 --- a/libs/community/langchain_community/llms/anthropic.py +++ b/libs/community/langchain_community/llms/anthropic.py @@ -52,6 +52,9 @@ class _AnthropicCommon(BaseLanguageModel): default_request_timeout: Optional[float] = None """Timeout for requests to Anthropic Completion API. Default is 600 seconds.""" + max_retries: int = 2 + """Number of retries allowed for requests sent to the Anthropic Completion API.""" + anthropic_api_url: Optional[str] = None anthropic_api_key: Optional[SecretStr] = None @@ -92,11 +95,13 @@ def validate_environment(cls, values: Dict) -> Dict: base_url=values["anthropic_api_url"], api_key=values["anthropic_api_key"].get_secret_value(), timeout=values["default_request_timeout"], + max_retries=values["max_retries"], ) values["async_client"] = anthropic.AsyncAnthropic( base_url=values["anthropic_api_url"], api_key=values["anthropic_api_key"].get_secret_value(), timeout=values["default_request_timeout"], + max_retries=values["max_retries"], ) values["HUMAN_PROMPT"] = anthropic.HUMAN_PROMPT values["AI_PROMPT"] = anthropic.AI_PROMPT diff --git a/libs/community/langchain_community/llms/bedrock.py b/libs/community/langchain_community/llms/bedrock.py index d3264703c7cc4..f4d7b1d69e187 100644 --- a/libs/community/langchain_community/llms/bedrock.py +++ b/libs/community/langchain_community/llms/bedrock.py @@ -1,11 +1,8 @@ -from __future__ import annotations - import asyncio import json import warnings from abc import ABC from typing import ( - TYPE_CHECKING, Any, AsyncGenerator, AsyncIterator, @@ -31,9 +28,6 @@ get_token_ids_anthropic, ) -if TYPE_CHECKING: - from botocore.config import Config - AMAZON_BEDROCK_TRACE_KEY = "amazon-bedrock-trace" GUARDRAILS_BODY_KEY = "amazon-bedrock-guardrailAssessment" HUMAN_PROMPT = "\n\nHuman:" @@ -226,7 +220,7 @@ class BedrockBase(BaseModel, ABC): See: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html """ - config: Optional[Config] = None + config: Any = None """An optional botocore.config.Config instance to pass to the client.""" provider: Optional[str] = None diff --git a/libs/community/langchain_community/llms/huggingface_endpoint.py b/libs/community/langchain_community/llms/huggingface_endpoint.py index c14b2e24a8050..df25bf367e8b4 100644 --- a/libs/community/langchain_community/llms/huggingface_endpoint.py +++ b/libs/community/langchain_community/llms/huggingface_endpoint.py @@ -1,12 +1,17 @@ -from typing import Any, Dict, List, Mapping, Optional +import json +import logging +from typing import Any, AsyncIterator, Dict, Iterator, List, Mapping, Optional -import requests -from langchain_core.callbacks import CallbackManagerForLLMRun +from langchain_core.callbacks import ( + AsyncCallbackManagerForLLMRun, + CallbackManagerForLLMRun, +) from langchain_core.language_models.llms import LLM -from langchain_core.pydantic_v1 import Extra, root_validator -from langchain_core.utils import get_from_dict_or_env +from langchain_core.outputs import GenerationChunk +from 
langchain_core.pydantic_v1 import Extra, Field, root_validator +from langchain_core.utils import get_from_dict_or_env, get_pydantic_field_names -from langchain_community.llms.utils import enforce_stop_tokens +logger = logging.getLogger(__name__) VALID_TASKS = ( "text2text-generation", @@ -17,70 +22,198 @@ class HuggingFaceEndpoint(LLM): - """HuggingFace Endpoint models. - - To use, you should have the ``huggingface_hub`` python package installed, and the - environment variable ``HUGGINGFACEHUB_API_TOKEN`` set with your API token, or pass - it as a named parameter to the constructor. + """ + HuggingFace Endpoint. - Only supports `text-generation` and `text2text-generation` for now. + To use this class, you should have installed the ``huggingface_hub`` package, and + the environment variable ``HUGGINGFACEHUB_API_TOKEN`` set with your API token, + or given as a named parameter to the constructor. Example: .. code-block:: python - from langchain_community.llms import HuggingFaceEndpoint - endpoint_url = ( - "https://abcdefghijklmnop.us-east-1.aws.endpoints.huggingface.cloud" + # Basic Example (no streaming) + llm = HuggingFaceEndpoint( + endpoint_url="http://localhost:8010/", + max_new_tokens=512, + top_k=10, + top_p=0.95, + typical_p=0.95, + temperature=0.01, + repetition_penalty=1.03, + huggingfacehub_api_token="my-api-key" ) - hf = HuggingFaceEndpoint( - endpoint_url=endpoint_url, + print(llm("What is Deep Learning?")) + + # Streaming response example + from langchain_community.callbacks import streaming_stdout + + callbacks = [streaming_stdout.StreamingStdOutCallbackHandler()] + llm = HuggingFaceEndpoint( + endpoint_url="http://localhost:8010/", + max_new_tokens=512, + top_k=10, + top_p=0.95, + typical_p=0.95, + temperature=0.01, + repetition_penalty=1.03, + callbacks=callbacks, + streaming=True, huggingfacehub_api_token="my-api-key" ) + print(llm("What is Deep Learning?")) + """ - endpoint_url: str = "" + endpoint_url: Optional[str] = None """Endpoint URL to use.""" + repo_id: Optional[str] = None + """Repo to use.""" + huggingfacehub_api_token: Optional[str] = None + max_new_tokens: int = 512 + """Maximum number of generated tokens""" + top_k: Optional[int] = None + """The number of highest probability vocabulary tokens to keep for + top-k-filtering.""" + top_p: Optional[float] = 0.95 + """If set to < 1, only the smallest set of most probable tokens with probabilities + that add up to `top_p` or higher are kept for generation.""" + typical_p: Optional[float] = 0.95 + """Typical Decoding mass. See [Typical Decoding for Natural Language + Generation](https://arxiv.org/abs/2202.00666) for more information.""" + temperature: Optional[float] = 0.8 + """The value used to module the logits distribution.""" + repetition_penalty: Optional[float] = None + """The parameter for repetition penalty. 1.0 means no penalty. 
+ See [this paper](https://arxiv.org/pdf/1909.05858.pdf) for more details.""" + return_full_text: bool = False + """Whether to prepend the prompt to the generated text""" + truncate: Optional[int] = None + """Truncate inputs tokens to the given size""" + stop_sequences: List[str] = Field(default_factory=list) + """Stop generating tokens if a member of `stop_sequences` is generated""" + seed: Optional[int] = None + """Random sampling seed""" + inference_server_url: str = "" + """text-generation-inference instance base url""" + timeout: int = 120 + """Timeout in seconds""" + streaming: bool = False + """Whether to generate a stream of tokens asynchronously""" + do_sample: bool = False + """Activate logits sampling""" + watermark: bool = False + """Watermarking with [A Watermark for Large Language Models] + (https://arxiv.org/abs/2301.10226)""" + server_kwargs: Dict[str, Any] = Field(default_factory=dict) + """Holds any text-generation-inference server parameters not explicitly specified""" + model_kwargs: Dict[str, Any] = Field(default_factory=dict) + """Holds any model parameters valid for `call` not explicitly specified""" + model: str + client: Any + async_client: Any task: Optional[str] = None """Task to call the model with. Should be a task that returns `generated_text` or `summary_text`.""" - model_kwargs: Optional[dict] = None - """Keyword arguments to pass to the model.""" - - huggingfacehub_api_token: Optional[str] = None class Config: """Configuration for this pydantic object.""" extra = Extra.forbid + @root_validator(pre=True) + def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]: + """Build extra kwargs from additional params that were passed in.""" + all_required_field_names = get_pydantic_field_names(cls) + extra = values.get("model_kwargs", {}) + for field_name in list(values): + if field_name in extra: + raise ValueError(f"Found {field_name} supplied twice.") + if field_name not in all_required_field_names: + logger.warning( + f"""WARNING! {field_name} is not default parameter. + {field_name} was transferred to model_kwargs. + Please make sure that {field_name} is what you intended.""" + ) + extra[field_name] = values.pop(field_name) + + invalid_model_kwargs = all_required_field_names.intersection(extra.keys()) + if invalid_model_kwargs: + raise ValueError( + f"Parameters {invalid_model_kwargs} should be specified explicitly. " + f"Instead they were passed in as part of `model_kwargs` parameter." + ) + + values["model_kwargs"] = extra + if "endpoint_url" not in values and "repo_id" not in values: + raise ValueError( + "Please specify an `endpoint_url` or `repo_id` for the model." + ) + if "endpoint_url" in values and "repo_id" in values: + raise ValueError( + "Please specify either an `endpoint_url` OR a `repo_id`, not both." + ) + values["model"] = values.get("endpoint_url") or values.get("repo_id") + return values + @root_validator() def validate_environment(cls, values: Dict) -> Dict: - """Validate that api key and python package exists in environment.""" - huggingfacehub_api_token = get_from_dict_or_env( - values, "huggingfacehub_api_token", "HUGGINGFACEHUB_API_TOKEN" - ) + """Validate that package is installed and that the API token is valid.""" try: - from huggingface_hub.hf_api import HfApi - - try: - HfApi( - endpoint="https://huggingface.co", # Can be a Private Hub endpoint. - token=huggingfacehub_api_token, - ).whoami() - except Exception as e: - raise ValueError( - "Could not authenticate with huggingface_hub. " - "Please check your API token." 
- ) from e + from huggingface_hub import login except ImportError: raise ImportError( "Could not import huggingface_hub python package. " "Please install it with `pip install huggingface_hub`." ) - values["huggingfacehub_api_token"] = huggingfacehub_api_token + try: + huggingfacehub_api_token = get_from_dict_or_env( + values, "huggingfacehub_api_token", "HUGGINGFACEHUB_API_TOKEN" + ) + login(token=huggingfacehub_api_token) + except Exception as e: + raise ValueError( + "Could not authenticate with huggingface_hub. " + "Please check your API token." + ) from e + + from huggingface_hub import AsyncInferenceClient, InferenceClient + + values["client"] = InferenceClient( + model=values["model"], + timeout=values["timeout"], + token=huggingfacehub_api_token, + **values["server_kwargs"], + ) + values["async_client"] = AsyncInferenceClient( + model=values["model"], + timeout=values["timeout"], + token=huggingfacehub_api_token, + **values["server_kwargs"], + ) + return values + @property + def _default_params(self) -> Dict[str, Any]: + """Get the default parameters for calling text generation inference API.""" + return { + "max_new_tokens": self.max_new_tokens, + "top_k": self.top_k, + "top_p": self.top_p, + "typical_p": self.typical_p, + "temperature": self.temperature, + "repetition_penalty": self.repetition_penalty, + "return_full_text": self.return_full_text, + "truncate": self.truncate, + "stop_sequences": self.stop_sequences, + "seed": self.seed, + "do_sample": self.do_sample, + "watermark": self.watermark, + **self.model_kwargs, + } + @property def _identifying_params(self) -> Mapping[str, Any]: """Get the identifying parameters.""" @@ -95,6 +228,13 @@ def _llm_type(self) -> str: """Return type of llm.""" return "huggingface_endpoint" + def _invocation_params( + self, runtime_stop: Optional[List[str]], **kwargs: Any + ) -> Dict[str, Any]: + params = {**self._default_params, **kwargs} + params["stop_sequences"] = params["stop_sequences"] + (runtime_stop or []) + return params + def _call( self, prompt: str, @@ -102,62 +242,129 @@ def _call( run_manager: Optional[CallbackManagerForLLMRun] = None, **kwargs: Any, ) -> str: - """Call out to HuggingFace Hub's inference endpoint. + """Call out to HuggingFace Hub's inference endpoint.""" + invocation_params = self._invocation_params(stop, **kwargs) + if self.streaming: + completion = "" + for chunk in self._stream(prompt, stop, run_manager, **invocation_params): + completion += chunk.text + return completion + else: + invocation_params["stop"] = invocation_params[ + "stop_sequences" + ] # porting 'stop_sequences' into the 'stop' argument + response = self.client.post( + json={"inputs": prompt, "parameters": invocation_params}, + stream=False, + task=self.task, + ) + response_text = json.loads(response.decode())[0]["generated_text"] - Args: - prompt: The prompt to pass into the model. - stop: Optional list of stop words to use when generating. + # Maybe the generation has stopped at one of the stop sequences: + # then we remove this stop sequence from the end of the generated text + for stop_seq in invocation_params["stop_sequences"]: + if response_text[-len(stop_seq) :] == stop_seq: + response_text = response_text[: -len(stop_seq)] + return response_text - Returns: - The string generated by the model. 
+ async def _acall( + self, + prompt: str, + stop: Optional[List[str]] = None, + run_manager: Optional[AsyncCallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> str: + invocation_params = self._invocation_params(stop, **kwargs) + if self.streaming: + completion = "" + async for chunk in self._astream( + prompt, stop, run_manager, **invocation_params + ): + completion += chunk.text + return completion + else: + invocation_params["stop"] = invocation_params["stop_sequences"] + response = await self.async_client.post( + json={"inputs": prompt, "parameters": invocation_params}, + stream=False, + task=self.task, + ) + response_text = json.loads(response.decode())[0]["generated_text"] - Example: - .. code-block:: python + # Maybe the generation has stopped at one of the stop sequences: + # then remove this stop sequence from the end of the generated text + for stop_seq in invocation_params["stop_sequences"]: + if response_text[-len(stop_seq) :] == stop_seq: + response_text = response_text[: -len(stop_seq)] + return response_text - response = hf("Tell me a joke.") - """ - _model_kwargs = self.model_kwargs or {} + def _stream( + self, + prompt: str, + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> Iterator[GenerationChunk]: + invocation_params = self._invocation_params(stop, **kwargs) - # payload samples - params = {**_model_kwargs, **kwargs} - parameter_payload = {"inputs": prompt, "parameters": params} + for response in self.client.text_generation( + prompt, **invocation_params, stream=True + ): + # identify stop sequence in generated text, if any + stop_seq_found: Optional[str] = None + for stop_seq in invocation_params["stop_sequences"]: + if stop_seq in response: + stop_seq_found = stop_seq - # HTTP headers for authorization - headers = { - "Authorization": f"Bearer {self.huggingfacehub_api_token}", - "Content-Type": "application/json", - } + # identify text to yield + text: Optional[str] = None + if stop_seq_found: + text = response[: response.index(stop_seq_found)] + else: + text = response - # send request - try: - response = requests.post( - self.endpoint_url, headers=headers, json=parameter_payload - ) - except requests.exceptions.RequestException as e: # This is the correct syntax - raise ValueError(f"Error raised by inference endpoint: {e}") - generated_text = response.json() - if "error" in generated_text: - raise ValueError( - f"Error raised by inference API: {generated_text['error']}" - ) - if self.task == "text-generation": - text = generated_text[0]["generated_text"] - # Remove prompt if included in generated text. - if text.startswith(prompt): - text = text[len(prompt) :] - elif self.task == "text2text-generation": - text = generated_text[0]["generated_text"] - elif self.task == "summarization": - text = generated_text[0]["summary_text"] - elif self.task == "conversational": - text = generated_text["response"][1] - else: - raise ValueError( - f"Got invalid task {self.task}, " - f"currently only {VALID_TASKS} are supported" - ) - if stop is not None: - # This is a bit hacky, but I can't figure out a better way to enforce - # stop tokens when making calls to huggingface_hub. 
- text = enforce_stop_tokens(text, stop) - return text + # yield text, if any + if text: + chunk = GenerationChunk(text=text) + yield chunk + if run_manager: + run_manager.on_llm_new_token(chunk.text) + + # break if stop sequence found + if stop_seq_found: + break + + async def _astream( + self, + prompt: str, + stop: Optional[List[str]] = None, + run_manager: Optional[AsyncCallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> AsyncIterator[GenerationChunk]: + invocation_params = self._invocation_params(stop, **kwargs) + async for response in await self.async_client.text_generation( + prompt, **invocation_params, stream=True + ): + # identify stop sequence in generated text, if any + stop_seq_found: Optional[str] = None + for stop_seq in invocation_params["stop_sequences"]: + if stop_seq in response: + stop_seq_found = stop_seq + + # identify text to yield + text: Optional[str] = None + if stop_seq_found: + text = response[: response.index(stop_seq_found)] + else: + text = response + + # yield text, if any + if text: + chunk = GenerationChunk(text=text) + yield chunk + if run_manager: + await run_manager.on_llm_new_token(chunk.text) + + # break if stop sequence found + if stop_seq_found: + break diff --git a/libs/community/langchain_community/llms/huggingface_hub.py b/libs/community/langchain_community/llms/huggingface_hub.py index 2d91231775231..2a5deaf766d61 100644 --- a/libs/community/langchain_community/llms/huggingface_hub.py +++ b/libs/community/langchain_community/llms/huggingface_hub.py @@ -1,6 +1,7 @@ import json from typing import Any, Dict, List, Mapping, Optional +from langchain_core._api.deprecation import deprecated from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.language_models.llms import LLM from langchain_core.pydantic_v1 import Extra, root_validator @@ -19,8 +20,10 @@ } +@deprecated("0.0.21", removal="0.2.0", alternative="HuggingFaceEndpoint") class HuggingFaceHub(LLM): """HuggingFaceHub models. + ! This class is deprecated, you should use HuggingFaceEndpoint instead. 
To use, you should have the ``huggingface_hub`` python package installed, and the environment variable ``HUGGINGFACEHUB_API_TOKEN`` set with your API token, or pass diff --git a/libs/community/langchain_community/llms/huggingface_pipeline.py b/libs/community/langchain_community/llms/huggingface_pipeline.py index 388ba117c25ec..7a2b915054fb5 100644 --- a/libs/community/langchain_community/llms/huggingface_pipeline.py +++ b/libs/community/langchain_community/llms/huggingface_pipeline.py @@ -9,8 +9,6 @@ from langchain_core.outputs import Generation, LLMResult from langchain_core.pydantic_v1 import Extra -from langchain_community.llms.utils import enforce_stop_tokens - DEFAULT_MODEL_ID = "gpt2" DEFAULT_TASK = "text-generation" VALID_TASKS = ("text2text-generation", "text-generation", "summarization") @@ -201,7 +199,12 @@ def _generate( batch_prompts = prompts[i : i + self.batch_size] # Process batch of prompts - responses = self.pipeline(batch_prompts, **pipeline_kwargs) + responses = self.pipeline( + batch_prompts, + stop_sequence=stop, + return_full_text=False, + **pipeline_kwargs, + ) # Process each response in the batch for j, response in enumerate(responses): @@ -210,23 +213,7 @@ def _generate( response = response[0] if self.pipeline.task == "text-generation": - try: - from transformers.pipelines.text_generation import ReturnType - - remove_prompt = ( - self.pipeline._postprocess_params.get("return_type") - != ReturnType.NEW_TEXT - ) - except Exception as e: - logger.warning( - f"Unable to extract pipeline return_type. " - f"Received error:\n\n{e}" - ) - remove_prompt = True - if remove_prompt: - text = response["generated_text"][len(batch_prompts[j]) :] - else: - text = response["generated_text"] + text = response["generated_text"] elif self.pipeline.task == "text2text-generation": text = response["generated_text"] elif self.pipeline.task == "summarization": @@ -236,9 +223,6 @@ def _generate( f"Got invalid task {self.pipeline.task}, " f"currently only {VALID_TASKS} are supported" ) - if stop: - # Enforce stop tokens - text = enforce_stop_tokens(text, stop) # Append the processed text to results text_generations.append(text) diff --git a/libs/community/langchain_community/llms/huggingface_text_gen_inference.py b/libs/community/langchain_community/llms/huggingface_text_gen_inference.py index e03b6f7adcf83..9f56a949c6b94 100644 --- a/libs/community/langchain_community/llms/huggingface_text_gen_inference.py +++ b/libs/community/langchain_community/llms/huggingface_text_gen_inference.py @@ -1,6 +1,7 @@ import logging from typing import Any, AsyncIterator, Dict, Iterator, List, Optional +from langchain_core._api.deprecation import deprecated from langchain_core.callbacks import ( AsyncCallbackManagerForLLMRun, CallbackManagerForLLMRun, @@ -13,9 +14,11 @@ logger = logging.getLogger(__name__) +@deprecated("0.0.21", removal="0.2.0", alternative="HuggingFaceEndpoint") class HuggingFaceTextGenInference(LLM): """ HuggingFace text generation API. + ! This class is deprecated, you should use HuggingFaceEndpoint instead ! To use, you should have the `text-generation` python package installed and a text-generation server running. 
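A minimal usage sketch for the reworked ``HuggingFaceEndpoint`` above, which the deprecated ``HuggingFaceHub`` and ``HuggingFaceTextGenInference`` classes now point to as their replacement. The ``repo_id`` and API token below are illustrative placeholders, not part of the change itself:

.. code-block:: python

    from langchain_community.llms import HuggingFaceEndpoint

    # Either `repo_id` (Hub inference) or `endpoint_url` (dedicated endpoint or
    # local text-generation-inference server) must be given, but not both.
    llm = HuggingFaceEndpoint(
        repo_id="mistralai/Mistral-7B-Instruct-v0.2",  # placeholder model
        max_new_tokens=128,
        temperature=0.1,
        huggingfacehub_api_token="hf_...",  # placeholder token
    )
    print(llm.invoke("What is Deep Learning?"))

    # Tokens can also be consumed incrementally.
    for token in llm.stream("What is Deep Learning?"):
        print(token, end="", flush=True)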
diff --git a/libs/community/langchain_community/llms/llamafile.py b/libs/community/langchain_community/llms/llamafile.py new file mode 100644 index 0000000000000..5be6f4f211865 --- /dev/null +++ b/libs/community/langchain_community/llms/llamafile.py @@ -0,0 +1,318 @@ +from __future__ import annotations + +import json +from io import StringIO +from typing import Any, Dict, Iterator, List, Optional + +import requests +from langchain_core.callbacks.manager import CallbackManagerForLLMRun +from langchain_core.language_models.llms import LLM +from langchain_core.outputs import GenerationChunk +from langchain_core.pydantic_v1 import Extra +from langchain_core.utils import get_pydantic_field_names + + +class Llamafile(LLM): + """Llamafile lets you distribute and run large language models with a + single file. + + To get started, see: https://github.com/Mozilla-Ocho/llamafile + + To use this class, you will need to first: + + 1. Download a llamafile. + 2. Make the downloaded file executable: `chmod +x path/to/model.llamafile` + 3. Start the llamafile in server mode: + + `./path/to/model.llamafile --server --nobrowser` + + Example: + .. code-block:: python + + from langchain_community.llms import Llamafile + llm = Llamafile() + llm.invoke("Tell me a joke.") + """ + + base_url: str = "http://localhost:8080" + """Base url where the llamafile server is listening.""" + + request_timeout: Optional[int] = None + """Timeout for server requests""" + + streaming: bool = False + """Allows receiving each predicted token in real-time instead of + waiting for the completion to finish. To enable this, set to true.""" + + # Generation options + + seed: int = -1 + """Random Number Generator (RNG) seed. A random seed is used if this is + less than zero. Default: -1""" + + temperature: float = 0.8 + """Temperature. Default: 0.8""" + + top_k: int = 40 + """Limit the next token selection to the K most probable tokens. + Default: 40.""" + + top_p: float = 0.95 + """Limit the next token selection to a subset of tokens with a cumulative + probability above a threshold P. Default: 0.95.""" + + min_p: float = 0.05 + """The minimum probability for a token to be considered, relative to + the probability of the most likely token. Default: 0.05.""" + + n_predict: int = -1 + """Set the maximum number of tokens to predict when generating text. + Note: May exceed the set limit slightly if the last token is a partial + multibyte character. When 0, no tokens will be generated but the prompt + is evaluated into the cache. Default: -1 = infinity.""" + + n_keep: int = 0 + """Specify the number of tokens from the prompt to retain when the + context size is exceeded and tokens need to be discarded. By default, + this value is set to 0 (meaning no tokens are kept). Use -1 to retain all + tokens from the prompt.""" + + tfs_z: float = 1.0 + """Enable tail free sampling with parameter z. Default: 1.0 = disabled.""" + + typical_p: float = 1.0 + """Enable locally typical sampling with parameter p. + Default: 1.0 = disabled.""" + + repeat_penalty: float = 1.1 + """Control the repetition of token sequences in the generated text. + Default: 1.1""" + + repeat_last_n: int = 64 + """Last n tokens to consider for penalizing repetition. Default: 64, + 0 = disabled, -1 = ctx-size.""" + + penalize_nl: bool = True + """Penalize newline tokens when applying the repeat penalty. + Default: true.""" + + presence_penalty: float = 0.0 + """Repeat alpha presence penalty. 
Default: 0.0 = disabled.""" + + frequency_penalty: float = 0.0 + """Repeat alpha frequency penalty. Default: 0.0 = disabled""" + + mirostat: int = 0 + """Enable Mirostat sampling, controlling perplexity during text + generation. 0 = disabled, 1 = Mirostat, 2 = Mirostat 2.0. + Default: disabled.""" + + mirostat_tau: float = 5.0 + """Set the Mirostat target entropy, parameter tau. Default: 5.0.""" + + mirostat_eta: float = 0.1 + """Set the Mirostat learning rate, parameter eta. Default: 0.1.""" + + class Config: + """Configuration for this pydantic object.""" + + extra = Extra.forbid + + @property + def _llm_type(self) -> str: + return "llamafile" + + @property + def _param_fieldnames(self) -> List[str]: + # Return the list of fieldnames that will be passed as configurable + # generation options to the llamafile server. Exclude 'builtin' fields + # from the BaseLLM class like 'metadata' as well as fields that should + # not be passed in requests (base_url, request_timeout). + ignore_keys = [ + "base_url", + "cache", + "callback_manager", + "callbacks", + "metadata", + "name", + "request_timeout", + "streaming", + "tags", + "verbose", + ] + attrs = [ + k for k in get_pydantic_field_names(self.__class__) if k not in ignore_keys + ] + return attrs + + @property + def _default_params(self) -> Dict[str, Any]: + params = {} + for fieldname in self._param_fieldnames: + params[fieldname] = getattr(self, fieldname) + return params + + def _get_parameters( + self, stop: Optional[List[str]] = None, **kwargs: Any + ) -> Dict[str, Any]: + params = self._default_params + + # Only update keys that are already present in the default config. + # This way, we don't accidentally post unknown/unhandled key/values + # in the request to the llamafile server + for k, v in kwargs.items(): + if k in params: + params[k] = v + + if stop is not None and len(stop) > 0: + params["stop"] = stop + + if self.streaming: + params["stream"] = True + + return params + + def _call( + self, + prompt: str, + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> str: + """Request prompt completion from the llamafile server and return the + output. + + Args: + prompt: The prompt to use for generation. + stop: A list of strings to stop generation when encountered. + run_manager: + **kwargs: Any additional options to pass as part of the + generation request. + + Returns: + The string generated by the model. + + """ + + if self.streaming: + with StringIO() as buff: + for chunk in self._stream( + prompt, stop=stop, run_manager=run_manager, **kwargs + ): + buff.write(chunk.text) + + text = buff.getvalue() + + return text + + else: + params = self._get_parameters(stop=stop, **kwargs) + payload = {"prompt": prompt, **params} + + try: + response = requests.post( + url=f"{self.base_url}/completion", + headers={ + "Content-Type": "application/json", + }, + json=payload, + stream=False, + timeout=self.request_timeout, + ) + except requests.exceptions.ConnectionError: + raise requests.exceptions.ConnectionError( + f"Could not connect to Llamafile server. Please make sure " + f"that a server is running at {self.base_url}." + ) + + response.raise_for_status() + response.encoding = "utf-8" + + text = response.json()["content"] + + return text + + def _stream( + self, + prompt: str, + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> Iterator[GenerationChunk]: + """Yields results objects as they are generated in real time. 
+ + It also calls the callback manager's on_llm_new_token event with + similar parameters to the OpenAI LLM class method of the same name. + + Args: + prompt: The prompts to pass into the model. + stop: Optional list of stop words to use when generating. + run_manager: + **kwargs: Any additional options to pass as part of the + generation request. + + Returns: + A generator representing the stream of tokens being generated. + + Yields: + Dictionary-like objects each containing a token + + Example: + .. code-block:: python + + from langchain_community.llms import Llamafile + llm = Llamafile( + temperature = 0.0 + ) + for chunk in llm.stream("Ask 'Hi, how are you?' like a pirate:'", + stop=["'","\n"]): + result = chunk["choices"][0] + print(result["text"], end='', flush=True) + + """ + params = self._get_parameters(stop=stop, **kwargs) + if "stream" not in params: + params["stream"] = True + + payload = {"prompt": prompt, **params} + + try: + response = requests.post( + url=f"{self.base_url}/completion", + headers={ + "Content-Type": "application/json", + }, + json=payload, + stream=True, + timeout=self.request_timeout, + ) + except requests.exceptions.ConnectionError: + raise requests.exceptions.ConnectionError( + f"Could not connect to Llamafile server. Please make sure " + f"that a server is running at {self.base_url}." + ) + + response.encoding = "utf8" + + for raw_chunk in response.iter_lines(decode_unicode=True): + content = self._get_chunk_content(raw_chunk) + chunk = GenerationChunk(text=content) + yield chunk + if run_manager: + run_manager.on_llm_new_token(token=chunk.text) + + def _get_chunk_content(self, chunk: str) -> str: + """When streaming is turned on, llamafile server returns lines like: + + 'data: {"content":" They","multimodal":true,"slot_id":0,"stop":false}' + + Here, we convert this to a dict and return the value of the 'content' + field + """ + + if chunk.startswith("data:"): + cleaned = chunk.lstrip("data: ") + data = json.loads(cleaned) + return data["content"] + else: + return chunk diff --git a/libs/community/langchain_community/llms/loading.py b/libs/community/langchain_community/llms/loading.py index c9621b784df60..f410f7b7c6c2a 100644 --- a/libs/community/langchain_community/llms/loading.py +++ b/libs/community/langchain_community/llms/loading.py @@ -35,7 +35,7 @@ def load_llm(file: Union[str, Path]) -> BaseLLM: if file_path.suffix == ".json": with open(file_path) as f: config = json.load(f) - elif file_path.suffix == ".yaml": + elif file_path.suffix.endswith((".yaml", ".yml")): with open(file_path, "r") as f: config = yaml.safe_load(f) else: diff --git a/libs/community/langchain_community/llms/openllm.py b/libs/community/langchain_community/llms/openllm.py index afb5a18f9ba45..fa3b03e1f98d5 100644 --- a/libs/community/langchain_community/llms/openllm.py +++ b/libs/community/langchain_community/llms/openllm.py @@ -72,7 +72,7 @@ class OpenLLM(LLM): from langchain_community.llms import OpenLLM llm = OpenLLM(server_url='http://localhost:3000') - llm("What is the difference between a duck and a goose?") + llm.invoke("What is the difference between a duck and a goose?") """ model_name: Optional[str] = None @@ -82,6 +82,8 @@ class OpenLLM(LLM): See 'openllm models' for all available model variants.""" server_url: Optional[str] = None """Optional server URL that currently runs a LLMServer with 'openllm start'.""" + timeout: int = 30 + """"Time out for the openllm client""" server_type: ServerType = "http" """Optional server type. 
Either 'http' or 'grpc'.""" embedded: bool = True @@ -125,6 +127,7 @@ def __init__( *, model_id: Optional[str] = None, server_url: Optional[str] = None, + timeout: int = 30, server_type: Literal["grpc", "http"] = "http", embedded: bool = True, **llm_kwargs: Any, @@ -149,11 +152,12 @@ def __init__( if server_type == "http" else openllm.client.GrpcClient ) - client = client_cls(server_url) + client = client_cls(server_url, timeout) super().__init__( **{ "server_url": server_url, + "timeout": timeout, "server_type": server_type, "llm_kwargs": llm_kwargs, } @@ -217,9 +221,9 @@ def chat(input_text: str): def _identifying_params(self) -> IdentifyingParams: """Get the identifying parameters.""" if self._client is not None: - self.llm_kwargs.update(self._client._config()) - model_name = self._client._metadata()["model_name"] - model_id = self._client._metadata()["model_id"] + self.llm_kwargs.update(self._client._config) + model_name = self._client._metadata.model_dump()["model_name"] + model_id = self._client._metadata.model_dump()["model_id"] else: if self._runner is None: raise ValueError("Runner must be initialized.") @@ -265,9 +269,11 @@ def _call( self._identifying_params["model_name"], **copied ) if self._client: - res = self._client.generate( - prompt, **config.model_dump(flatten=True) - ).responses[0] + res = ( + self._client.generate(prompt, **config.model_dump(flatten=True)) + .outputs[0] + .text + ) else: assert self._runner is not None res = self._runner(prompt, **config.model_dump(flatten=True)) diff --git a/libs/community/langchain_community/llms/watsonxllm.py b/libs/community/langchain_community/llms/watsonxllm.py index aaea7cd06f74c..191574e094b06 100644 --- a/libs/community/langchain_community/llms/watsonxllm.py +++ b/libs/community/langchain_community/llms/watsonxllm.py @@ -2,6 +2,7 @@ import os from typing import Any, Dict, Iterator, List, Mapping, Optional, Union +from langchain_core._api.deprecation import deprecated from langchain_core.callbacks import CallbackManagerForLLMRun from langchain_core.language_models.llms import BaseLLM from langchain_core.outputs import Generation, GenerationChunk, LLMResult @@ -11,6 +12,9 @@ logger = logging.getLogger(__name__) +@deprecated( + since="0.0.18", removal="0.2", alternative_import="langchain_ibm.WatsonxLLM" +) class WatsonxLLM(BaseLLM): """ IBM watsonx.ai large language models. 
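As a small illustration of the new ``timeout`` parameter threaded through the ``OpenLLM`` wrapper above (the server URL is a local placeholder):

.. code-block:: python

    from langchain_community.llms import OpenLLM

    # Connect to a server started with `openllm start`; the timeout (seconds)
    # is now passed straight to the underlying HTTP or gRPC client.
    llm = OpenLLM(server_url="http://localhost:3000", timeout=60)
    print(llm.invoke("What is the difference between a duck and a goose?"))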
@@ -293,9 +297,6 @@ def _stream_response_to_generation_chunk( generation_info=dict( finish_reason=stream_response["results"][0].get("stop_reason", None), llm_output={ - "generated_token_count": stream_response["results"][0].get( - "generated_token_count", None - ), "model_id": self.model_id, "deployment_id": self.deployment_id, }, @@ -397,7 +398,7 @@ def _stream( prompt=prompt, raw_response=True, params=params ): chunk = self._stream_response_to_generation_chunk(stream_resp) - yield chunk if run_manager: run_manager.on_llm_new_token(chunk.text, chunk=chunk) + yield chunk diff --git a/libs/community/langchain_community/llms/yuan2.py b/libs/community/langchain_community/llms/yuan2.py new file mode 100644 index 0000000000000..360418a0a0f5a --- /dev/null +++ b/libs/community/langchain_community/llms/yuan2.py @@ -0,0 +1,192 @@ +import json +import logging +from typing import Any, Dict, List, Mapping, Optional, Set + +import requests +from langchain_core.callbacks import CallbackManagerForLLMRun +from langchain_core.language_models.llms import LLM +from langchain_core.pydantic_v1 import Field + +from langchain_community.llms.utils import enforce_stop_tokens + +logger = logging.getLogger(__name__) + + +class Yuan2(LLM): + """Yuan2.0 language models. + + Example: + .. code-block:: python + + yuan_llm = Yuan2( + infer_api="http://127.0.0.1:8000/yuan", + max_tokens=1024, + temp=1.0, + top_p=0.9, + top_k=40, + ) + print(yuan_llm) + print(yuan_llm("ไฝ ๆ˜ฏ่ฐ๏ผŸ")) + """ + + infer_api: str = "http://127.0.0.1:8000/yuan" + """Yuan2.0 inference api""" + + max_tokens: int = Field(1024, alias="max_token") + """Token context window.""" + + temp: Optional[float] = 0.7 + """The temperature to use for sampling.""" + + top_p: Optional[float] = 0.9 + """The top-p value to use for sampling.""" + + top_k: Optional[int] = 40 + """The top-k value to use for sampling.""" + + do_sample: bool = False + """The do_sample is a Boolean value that determines whether + to use the sampling method during text generation. + """ + + echo: Optional[bool] = False + """Whether to echo the prompt.""" + + stop: Optional[List[str]] = [] + """A list of strings to stop generation when encountered.""" + + repeat_last_n: Optional[int] = 64 + "Last n tokens to penalize" + + repeat_penalty: Optional[float] = 1.18 + """The penalty to apply to repeated tokens.""" + + streaming: bool = False + """Whether to stream the results or not.""" + + history: List[str] = [] + """History of the conversation""" + + use_history: bool = False + """Whether to use history or not""" + + @property + def _llm_type(self) -> str: + return "Yuan2.0" + + @staticmethod + def _model_param_names() -> Set[str]: + return { + "max_tokens", + "temp", + "top_k", + "top_p", + "do_sample", + } + + def _default_params(self) -> Dict[str, Any]: + return { + "infer_api": self.infer_api, + "max_tokens": self.max_tokens, + "temp": self.temp, + "top_k": self.top_k, + "top_p": self.top_p, + "do_sample": self.do_sample, + "use_history": self.use_history, + } + + @property + def _identifying_params(self) -> Mapping[str, Any]: + """Get the identifying parameters.""" + return { + "model": self._llm_type, + **self._default_params(), + **{ + k: v for k, v in self.__dict__.items() if k in self._model_param_names() + }, + } + + def _call( + self, + prompt: str, + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> str: + """Call out to a Yuan2.0 LLM inference endpoint. + + Args: + prompt: The prompt to pass into the model. 
+            stop: Optional list of stop words to use when generating.
+
+        Returns:
+            The string generated by the model.
+
+        Example:
+            .. code-block:: python
+
+                response = yuan_llm("ไฝ ่ƒฝๅšไป€ไนˆ?")
+        """
+
+        if self.use_history:
+            self.history.append(prompt)
+            input = "".join(self.history)
+        else:
+            input = prompt
+
+        headers = {"Content-Type": "application/json"}
+        data = json.dumps(
+            {
+                "ques_list": [{"id": "000", "ques": input}],
+                "tokens_to_generate": self.max_tokens,
+                "temperature": self.temp,
+                "top_p": self.top_p,
+                "top_k": self.top_k,
+                "do_sample": self.do_sample,
+            }
+        )
+
+        logger.debug(f"Yuan2.0 prompt: {input}")
+
+        # call api
+        try:
+            response = requests.put(self.infer_api, headers=headers, data=data)
+        except requests.exceptions.RequestException as e:
+            raise ValueError(f"Error raised by inference api: {e}")
+
+        logger.debug(f"Yuan2.0 response: {response}")
+
+        if response.status_code != 200:
+            raise ValueError(f"Failed with response: {response}")
+        try:
+            resp = response.json()
+
+            if resp["errCode"] != "0":
+                raise ValueError(
+                    f"Failed with error code [{resp['errCode']}], "
+                    f"error message: [{resp['errMessage']}]"
+                )
+
+            if "resData" in resp:
+                if len(resp["resData"]["output"]) > 0:
+                    generate_text = resp["resData"]["output"][0]["ans"]
+                else:
+                    raise ValueError("No output found in response.")
+            else:
+                raise ValueError("No resData found in response.")
+
+        except requests.exceptions.JSONDecodeError as e:
+            raise ValueError(
+                f"Error raised during decoding response from inference api: {e}."
+                f"\nResponse: {response.text}"
+            )
+
+        if stop is not None:
+            generate_text = enforce_stop_tokens(generate_text, stop)
+
+        # support multi-turn chat
+        if self.use_history:
+            self.history.append(generate_text)
+
+        logger.debug(f"history: {self.history}")
+        return generate_text
diff --git a/libs/community/langchain_community/storage/__init__.py b/libs/community/langchain_community/storage/__init__.py
index 494591b03c713..ffb95cab1d1ed 100644
--- a/libs/community/langchain_community/storage/__init__.py
+++ b/libs/community/langchain_community/storage/__init__.py
@@ -10,6 +10,7 @@
     AstraDBByteStore,
     AstraDBStore,
 )
+from langchain_community.storage.mongodb import MongoDBStore
 from langchain_community.storage.redis import RedisStore
 from langchain_community.storage.upstash_redis import (
     UpstashRedisByteStore,
@@ -19,6 +20,7 @@
 __all__ = [
     "AstraDBStore",
     "AstraDBByteStore",
+    "MongoDBStore",
     "RedisStore",
     "UpstashRedisByteStore",
     "UpstashRedisStore",
diff --git a/libs/community/langchain_community/storage/astradb.py b/libs/community/langchain_community/storage/astradb.py
index f84ae1721c837..959ef374124c7 100644
--- a/libs/community/langchain_community/storage/astradb.py
+++ b/libs/community/langchain_community/storage/astradb.py
@@ -5,6 +5,7 @@
 from typing import (
     TYPE_CHECKING,
     Any,
+    AsyncIterator,
     Generic,
     Iterator,
     List,
@@ -16,10 +17,13 @@
 
 from langchain_core.stores import BaseStore, ByteStore
 
-from langchain_community.utilities.astradb import AstraDBEnvironment
+from langchain_community.utilities.astradb import (
+    SetupMode,
+    _AstraDBCollectionEnvironment,
+)
 
 if TYPE_CHECKING:
-    from astrapy.db import AstraDB
+    from astrapy.db import AstraDB, AsyncAstraDB
 
 V = TypeVar("V")
 
@@ -34,17 +38,23 @@ def __init__(
         api_endpoint: Optional[str] = None,
         astra_db_client: Optional[AstraDB] = None,
         namespace: Optional[str] = None,
+        *,
+        async_astra_db_client: Optional[AsyncAstraDB] = None,
+        pre_delete_collection: bool = False,
+        setup_mode: SetupMode = SetupMode.SYNC,
     ) -> None:
-        astra_env
= AstraDBEnvironment( + self.astra_env = _AstraDBCollectionEnvironment( + collection_name=collection_name, token=token, api_endpoint=api_endpoint, astra_db_client=astra_db_client, + async_astra_db_client=async_astra_db_client, namespace=namespace, + setup_mode=setup_mode, + pre_delete_collection=pre_delete_collection, ) - self.astra_db = astra_env.astra_db - self.collection = self.astra_db.create_collection( - collection_name=collection_name, - ) + self.collection = self.astra_env.collection + self.async_collection = self.astra_env.async_collection @abstractmethod def decode_value(self, value: Any) -> Optional[V]: @@ -56,28 +66,63 @@ def encode_value(self, value: Optional[V]) -> Any: def mget(self, keys: Sequence[str]) -> List[Optional[V]]: """Get the values associated with the given keys.""" + self.astra_env.ensure_db_setup() docs_dict = {} for doc in self.collection.paginated_find(filter={"_id": {"$in": list(keys)}}): docs_dict[doc["_id"]] = doc.get("value") return [self.decode_value(docs_dict.get(key)) for key in keys] + async def amget(self, keys: Sequence[str]) -> List[Optional[V]]: + """Get the values associated with the given keys.""" + await self.astra_env.aensure_db_setup() + docs_dict = {} + async for doc in self.async_collection.paginated_find( + filter={"_id": {"$in": list(keys)}} + ): + docs_dict[doc["_id"]] = doc.get("value") + return [self.decode_value(docs_dict.get(key)) for key in keys] + def mset(self, key_value_pairs: Sequence[Tuple[str, V]]) -> None: """Set the given key-value pairs.""" + self.astra_env.ensure_db_setup() for k, v in key_value_pairs: self.collection.upsert({"_id": k, "value": self.encode_value(v)}) + async def amset(self, key_value_pairs: Sequence[Tuple[str, V]]) -> None: + """Set the given key-value pairs.""" + await self.astra_env.aensure_db_setup() + for k, v in key_value_pairs: + await self.async_collection.upsert( + {"_id": k, "value": self.encode_value(v)} + ) + def mdelete(self, keys: Sequence[str]) -> None: """Delete the given keys.""" + self.astra_env.ensure_db_setup() self.collection.delete_many(filter={"_id": {"$in": list(keys)}}) + async def amdelete(self, keys: Sequence[str]) -> None: + """Delete the given keys.""" + await self.astra_env.aensure_db_setup() + await self.async_collection.delete_many(filter={"_id": {"$in": list(keys)}}) + def yield_keys(self, *, prefix: Optional[str] = None) -> Iterator[str]: """Yield keys in the store.""" + self.astra_env.ensure_db_setup() docs = self.collection.paginated_find() for doc in docs: key = doc["_id"] if not prefix or key.startswith(prefix): yield key + async def ayield_keys(self, *, prefix: Optional[str] = None) -> AsyncIterator[str]: + """Yield keys in the store.""" + await self.astra_env.aensure_db_setup() + async for doc in self.async_collection.paginated_find(): + key = doc["_id"] + if not prefix or key.startswith(prefix): + yield key + class AstraDBStore(AstraDBBaseStore[Any]): """BaseStore implementation using DataStax AstraDB as the underlying store. diff --git a/libs/community/langchain_community/storage/mongodb.py b/libs/community/langchain_community/storage/mongodb.py new file mode 100644 index 0000000000000..97447f832175e --- /dev/null +++ b/libs/community/langchain_community/storage/mongodb.py @@ -0,0 +1,126 @@ +from typing import Iterator, List, Optional, Sequence, Tuple + +from langchain_core.documents import Document +from langchain_core.stores import BaseStore + + +class MongoDBStore(BaseStore[str, Document]): + """BaseStore implementation using MongoDB as the underlying store. 
+ + Examples: + Create a MongoDBStore instance and perform operations on it: + + .. code-block:: python + + # Instantiate the MongoDBStore with a MongoDB connection + from langchain.storage import MongoDBStore + + mongo_conn_str = "mongodb://localhost:27017/" + mongodb_store = MongoDBStore(mongo_conn_str, db_name="test-db", + collection_name="test-collection") + + # Set values for keys + doc1 = Document(...) + doc2 = Document(...) + mongodb_store.mset([("key1", doc1), ("key2", doc2)]) + + # Get values for keys + values = mongodb_store.mget(["key1", "key2"]) + # [doc1, doc2] + + # Iterate over keys + for key in mongodb_store.yield_keys(): + print(key) + + # Delete keys + mongodb_store.mdelete(["key1", "key2"]) + """ + + def __init__( + self, + connection_string: str, + db_name: str, + collection_name: str, + *, + client_kwargs: Optional[dict] = None, + ) -> None: + """Initialize the MongoDBStore with a MongoDB connection string. + + Args: + connection_string (str): MongoDB connection string + db_name (str): name to use + collection_name (str): collection name to use + client_kwargs (dict): Keyword arguments to pass to the Mongo client + """ + try: + from pymongo import MongoClient + except ImportError as e: + raise ImportError( + "The MongoDBStore requires the pymongo library to be " + "installed. " + "pip install pymongo" + ) from e + + if not connection_string: + raise ValueError("connection_string must be provided.") + if not db_name: + raise ValueError("db_name must be provided.") + if not collection_name: + raise ValueError("collection_name must be provided.") + + self.client = MongoClient(connection_string, **(client_kwargs or {})) + self.collection = self.client[db_name][collection_name] + + def mget(self, keys: Sequence[str]) -> List[Optional[Document]]: + """Get the list of documents associated with the given keys. + + Args: + keys (list[str]): A list of keys representing Document IDs.. + + Returns: + list[Document]: A list of Documents corresponding to the provided + keys, where each Document is either retrieved successfully or + represented as None if not found. + """ + result = self.collection.find({"_id": {"$in": keys}}) + result_dict = {doc["_id"]: Document(**doc["value"]) for doc in result} + return [result_dict.get(key) for key in keys] + + def mset(self, key_value_pairs: Sequence[Tuple[str, Document]]) -> None: + """Set the given key-value pairs. + + Args: + key_value_pairs (list[tuple[str, Document]]): A list of id-document + pairs. + Returns: + None + """ + from pymongo import UpdateOne + + updates = [{"_id": k, "value": v.__dict__} for k, v in key_value_pairs] + self.collection.bulk_write( + [UpdateOne({"_id": u["_id"]}, {"$set": u}, upsert=True) for u in updates] + ) + + def mdelete(self, keys: Sequence[str]) -> None: + """Delete the given ids. + + Args: + keys (list[str]): A list of keys representing Document IDs.. + """ + self.collection.delete_many({"_id": {"$in": keys}}) + + def yield_keys(self, prefix: Optional[str] = None) -> Iterator[str]: + """Yield keys in the store. + + Args: + prefix (str): prefix of keys to retrieve. 
+ """ + if prefix is None: + for doc in self.collection.find(projection=["_id"]): + yield doc["_id"] + else: + for doc in self.collection.find( + {"_id": {"$regex": f"^{prefix}"}}, projection=["_id"] + ): + yield doc["_id"] diff --git a/libs/community/langchain_community/tools/__init__.py b/libs/community/langchain_community/tools/__init__.py index 3456ef10bc1e6..59ad157de5bf8 100644 --- a/libs/community/langchain_community/tools/__init__.py +++ b/libs/community/langchain_community/tools/__init__.py @@ -118,6 +118,32 @@ def _import_brave_search_tool() -> Any: return BraveSearch +def _import_cogniswitch_store_file_tool() -> Any: + from langchain_community.tools.cogniswitch.tool import ( + CogniswitchKnowledgeSourceFile, + ) + + return CogniswitchKnowledgeSourceFile + + +def _import_cogniswitch_store_url_tool() -> Any: + from langchain_community.tools.cogniswitch.tool import CogniswitchKnowledgeSourceURL + + return CogniswitchKnowledgeSourceURL + + +def _import_cogniswitch_answer_tool() -> Any: + from langchain_community.tools.cogniswitch.tool import CogniswitchKnowledgeRequest + + return CogniswitchKnowledgeRequest + + +def _import_cogniswitch_knowledge_status_tool() -> Any: + from langchain_community.tools.cogniswitch.tool import CogniswitchKnowledgeStatus + + return CogniswitchKnowledgeStatus + + def _import_connery_tool() -> Any: from langchain_community.tools.connery import ConneryAction @@ -803,6 +829,14 @@ def __getattr__(name: str) -> Any: return _import_bing_search_tool_BingSearchRun() elif name == "BraveSearch": return _import_brave_search_tool() + elif name == "CogniswitchKnowledgeSourceFile": + return _import_cogniswitch_store_file_tool() + elif name == "CogniswitchKnowledgeSourceURL": + return _import_cogniswitch_store_url_tool() + elif name == "CogniswitchKnowledgeRequest": + return _import_cogniswitch_answer_tool() + elif name == "CogniswitchKnowledgeStatus": + return _import_cogniswitch_knowledge_status_tool() elif name == "ConneryAction": return _import_connery_tool() elif name == "DuckDuckGoSearchResults": @@ -1043,6 +1077,10 @@ def __getattr__(name: str) -> Any: "BingSearchRun", "BraveSearch", "ClickTool", + "CogniswitchKnowledgeSourceFile", + "CogniswitchKnowledgeSourceURL", + "CogniswitchKnowledgeRequest", + "CogniswitchKnowledgeStatus", "ConneryAction", "CopyFileTool", "CurrentWebPageTool", diff --git a/libs/community/langchain_community/tools/cogniswitch/__init__.py b/libs/community/langchain_community/tools/cogniswitch/__init__.py new file mode 100644 index 0000000000000..3a89a8d7d3a9a --- /dev/null +++ b/libs/community/langchain_community/tools/cogniswitch/__init__.py @@ -0,0 +1 @@ +"Cogniswitch Tools" diff --git a/libs/community/langchain_community/tools/cogniswitch/tool.py b/libs/community/langchain_community/tools/cogniswitch/tool.py new file mode 100644 index 0000000000000..e2878e6ed544e --- /dev/null +++ b/libs/community/langchain_community/tools/cogniswitch/tool.py @@ -0,0 +1,399 @@ +from __future__ import annotations + +from typing import Any, Dict, Optional + +import requests +from langchain_core.callbacks import CallbackManagerForToolRun +from langchain_core.tools import BaseTool + + +class CogniswitchKnowledgeRequest(BaseTool): + """ + A tool for interacting with the Cogniswitch service to answer questions. + name: str = "cogniswitch_knowledge_request" + description: str = ( + "A wrapper around cogniswitch service to answer the question + from the knowledge base." + "Input should be a search query." 
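[Editor's note: the `tools/__init__.py` changes above follow the package's lazy-import convention: each tool gets a small `_import_*` helper plus a branch in the module-level `__getattr__`, so the heavy import only happens when the attribute is first accessed. A self-contained sketch of that mechanism (the module and tool names here are made up).]

```python
# lazy_tools.py -- illustrative module, not part of the PR
from typing import Any


def _import_example_tool() -> Any:
    # The dependency is imported only when the attribute is requested.
    from json import JSONDecoder  # stand-in for a heavyweight tool module

    return JSONDecoder


def __getattr__(name: str) -> Any:  # PEP 562 module-level attribute hook
    if name == "ExampleTool":
        return _import_example_tool()
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")


__all__ = ["ExampleTool"]
```

With this in place, `import lazy_tools` stays cheap and `lazy_tools.ExampleTool` triggers the deferred import on first access, which is how `CogniswitchKnowledgeRequest` and the other new tools are exposed from `langchain_community.tools`.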
+ ) + """ + + name: str = "cogniswitch_knowledge_request" + description: str = """A wrapper around cogniswitch service to + answer the question from the knowledge base.""" + cs_token: str + OAI_token: str + apiKey: str + api_url = "https://api.cogniswitch.ai:8243/cs-api/0.0.1/cs/knowledgeRequest" + + def _run( + self, + query: str, + run_manager: Optional[CallbackManagerForToolRun] = None, + ) -> Dict[str, Any]: + """ + Use the tool to answer a query. + + Args: + query (str): Natural language query, + that you would like to ask to your knowledge graph. + run_manager (Optional[CallbackManagerForChainRun]): + Manager for chain run callbacks. + + Returns: + Dict[str, Any]: Output dictionary containing + the 'response' from the service. + """ + response = self.answer_cs(self.cs_token, self.OAI_token, query, self.apiKey) + return response + + def answer_cs(self, cs_token: str, OAI_token: str, query: str, apiKey: str) -> dict: + """ + Send a query to the Cogniswitch service and retrieve the response. + + Args: + cs_token (str): Cogniswitch token. + OAI_token (str): OpenAI token. + apiKey (str): OAuth token. + query (str): Query to be answered. + + Returns: + dict: Response JSON from the Cogniswitch service. + """ + if not cs_token: + raise ValueError("Missing cs_token") + if not OAI_token: + raise ValueError("Missing OpenAI token") + if not apiKey: + raise ValueError("Missing cogniswitch OAuth token") + if not query: + raise ValueError("Missing input query") + + headers = { + "apiKey": apiKey, + "platformToken": cs_token, + "openAIToken": OAI_token, + } + + data = {"query": query} + response = requests.post(self.api_url, headers=headers, verify=False, data=data) + return response.json() + + +class CogniswitchKnowledgeStatus(BaseTool): + """ + A cogniswitch tool for interacting with the Cogniswitch services to know the + status of the document or url uploaded. + name: str = "cogniswitch_knowledge_status" + description: str = ( + "A wrapper around cogniswitch services to know the status of + the document uploaded from a url or a file. " + "Input should be a file name or the url link" + ) + """ + + name: str = "cogniswitch_knowledge_status" + description: str = """A wrapper around cogniswitch services to know + the status of the document uploaded from a url or a file.""" + cs_token: str + OAI_token: str + apiKey: str + knowledge_status_url = ( + "https://api.cogniswitch.ai:8243/cs-api/0.0.1/cs/knowledgeSource/status" + ) + + def _run( + self, + document_name: str, + run_manager: Optional[CallbackManagerForToolRun] = None, + ) -> Dict[str, Any]: + """ + Use the tool to know the status of the document uploaded. + + Args: + document_name (str): name of the document or + the url uploaded + run_manager (Optional[CallbackManagerForChainRun]): + Manager for chain run callbacks. + + Returns: + Dict[str, Any]: Output dictionary containing + the 'response' from the service. + """ + response = self.knowledge_status(document_name) + return response + + def knowledge_status(self, document_name: str) -> dict: + """ + Use this function to know the status of the document or the URL uploaded + Args: + document_name (str): The document name or the url that is uploaded. + + Returns: + dict: Response JSON from the Cogniswitch service. 
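[Editor's note: like any `BaseTool`, the knowledge-request tool above can be instantiated with the three credentials it declares and invoked with a plain query string. A hedged sketch; the tokens are placeholders and the call performs a real HTTP request to the Cogniswitch endpoint.]

```python
from langchain_community.tools.cogniswitch.tool import CogniswitchKnowledgeRequest

tool = CogniswitchKnowledgeRequest(
    cs_token="<cogniswitch-platform-token>",  # placeholder
    OAI_token="<openai-api-key>",             # placeholder
    apiKey="<cogniswitch-oauth-token>",       # placeholder
)

# BaseTool.run() forwards the string to _run(), which posts the query to the
# knowledgeRequest endpoint and returns the service's JSON response.
answer = tool.run("What does the uploaded handbook say about refunds?")
print(answer)
```

`CogniswitchKnowledgeStatus` and the two knowledge-source tools below are constructed the same way, with the same three credential fields.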
+ """ + + params = {"docName": document_name, "platformToken": self.cs_token} + headers = { + "apiKey": self.apiKey, + "openAIToken": self.OAI_token, + "platformToken": self.cs_token, + } + response = requests.get( + self.knowledge_status_url, + headers=headers, + params=params, + verify=False, + ) + if response.status_code == 200: + source_info = response.json() + source_data = dict(source_info[-1]) + status = source_data.get("status") + if status == 0: + source_data["status"] = "SUCCESS" + elif status == 1: + source_data["status"] = "PROCESSING" + elif status == 2: + source_data["status"] = "UPLOADED" + elif status == 3: + source_data["status"] = "FAILURE" + elif status == 4: + source_data["status"] = "UPLOAD_FAILURE" + elif status == 5: + source_data["status"] = "REJECTED" + + if "filePath" in source_data.keys(): + source_data.pop("filePath") + if "savedFileName" in source_data.keys(): + source_data.pop("savedFileName") + if "integrationConfigId" in source_data.keys(): + source_data.pop("integrationConfigId") + if "metaData" in source_data.keys(): + source_data.pop("metaData") + if "docEntryId" in source_data.keys(): + source_data.pop("docEntryId") + return source_data + else: + # error_message = response.json()["message"] + return { + "message": response.status_code, + } + + +class CogniswitchKnowledgeSourceFile(BaseTool): + """ + A cogniswitch tool for interacting with the Cogniswitch services to store data. + name: str = "cogniswitch_knowledge_source_file" + description: str = ( + "This calls the CogniSwitch services to analyze & store data from a file. + If the input looks like a file path, assign that string value to file key. + Assign document name & description only if provided in input." + ) + """ + + name: str = "cogniswitch_knowledge_source_file" + description: str = """ + This calls the CogniSwitch services to analyze & store data from a file. + If the input looks like a file path, assign that string value to file key. + Assign document name & description only if provided in input. + """ + cs_token: str + OAI_token: str + apiKey: str + knowledgesource_file = ( + "https://api.cogniswitch.ai:8243/cs-api/0.0.1/cs/knowledgeSource/file" + ) + + def _run( + self, + file: Optional[str] = None, + document_name: Optional[str] = None, + document_description: Optional[str] = None, + run_manager: Optional[CallbackManagerForToolRun] = None, + ) -> Dict[str, Any]: + """ + Execute the tool to store the data given from a file. + This calls the CogniSwitch services to analyze & store data from a file. + If the input looks like a file path, assign that string value to file key. + Assign document name & description only if provided in input. + + Args: + file Optional[str]: The file path of your knowledge + document_name Optional[str]: Name of your knowledge document + document_description Optional[str]: Description of your knowledge document + run_manager (Optional[CallbackManagerForChainRun]): + Manager for chain run callbacks. + + Returns: + Dict[str, Any]: Output dictionary containing + the 'response' from the service. + """ + if not file: + return { + "message": "No input provided", + } + else: + response = self.store_data( + file=file, + document_name=document_name, + document_description=document_description, + ) + return response + + def store_data( + self, + file: Optional[str], + document_name: Optional[str], + document_description: Optional[str], + ) -> dict: + """ + Store data using the Cogniswitch service. + This calls the CogniSwitch services to analyze & store data from a file. 
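[Editor's note: `knowledge_status` above maps the integer `status` field with a chain of `elif` branches and then strips internal keys one at a time. A sketch of the same mapping expressed as a lookup table and a filtered comprehension; `summarize_source` is an illustrative name, not part of the PR.]

```python
from typing import Any, Dict

STATUS_LABELS = {
    0: "SUCCESS",
    1: "PROCESSING",
    2: "UPLOADED",
    3: "FAILURE",
    4: "UPLOAD_FAILURE",
    5: "REJECTED",
}
INTERNAL_KEYS = {
    "filePath", "savedFileName", "integrationConfigId", "metaData", "docEntryId",
}


def summarize_source(source_data: Dict[str, Any]) -> Dict[str, Any]:
    """Replace the numeric status with a label and drop internal bookkeeping keys."""
    summary = {k: v for k, v in source_data.items() if k not in INTERNAL_KEYS}
    status = source_data.get("status")
    if status is not None:
        summary["status"] = STATUS_LABELS.get(status, status)
    return summary


print(summarize_source({"status": 1, "docName": "handbook.pdf", "filePath": "/tmp/x"}))
# {'status': 'PROCESSING', 'docName': 'handbook.pdf'}
```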
+ If the input looks like a file path, assign that string value to file key. + Assign document name & description only if provided in input. + + Args: + file (Optional[str]): file path of your file. + the current files supported by the files are + .txt, .pdf, .docx, .doc, .html + document_name (Optional[str]): Name of the document you are uploading. + document_description (Optional[str]): Description of the document. + + Returns: + dict: Response JSON from the Cogniswitch service. + """ + headers = { + "apiKey": self.apiKey, + "openAIToken": self.OAI_token, + "platformToken": self.cs_token, + } + data: Dict[str, Any] + if not document_name: + document_name = "" + if not document_description: + document_description = "" + + if file is not None: + files = {"file": open(file, "rb")} + + data = { + "documentName": document_name, + "documentDescription": document_description, + } + response = requests.post( + self.knowledgesource_file, + headers=headers, + verify=False, + data=data, + files=files, + ) + if response.status_code == 200: + return response.json() + else: + return {"message": "Bad Request"} + + +class CogniswitchKnowledgeSourceURL(BaseTool): + """ + A cogniswitch tool for interacting with the Cogniswitch services to store data. + name: str = "cogniswitch_knowledge_source_url" + description: str = ( + "This calls the CogniSwitch services to analyze & store data from a url. + the URL is provided in input, assign that value to the url key. + Assign document name & description only if provided in input" + ) + """ + + name: str = "cogniswitch_knowledge_source_url" + description: str = """ + This calls the CogniSwitch services to analyze & store data from a url. + the URL is provided in input, assign that value to the url key. + Assign document name & description only if provided in input""" + cs_token: str + OAI_token: str + apiKey: str + knowledgesource_url = ( + "https://api.cogniswitch.ai:8243/cs-api/0.0.1/cs/knowledgeSource/url" + ) + + def _run( + self, + url: Optional[str] = None, + document_name: Optional[str] = None, + document_description: Optional[str] = None, + run_manager: Optional[CallbackManagerForToolRun] = None, + ) -> Dict[str, Any]: + """ + Execute the tool to store the data given from a url. + This calls the CogniSwitch services to analyze & store data from a url. + the URL is provided in input, assign that value to the url key. + Assign document name & description only if provided in input. + + Args: + url Optional[str]: The website/url link of your knowledge + document_name Optional[str]: Name of your knowledge document + document_description Optional[str]: Description of your knowledge document + run_manager (Optional[CallbackManagerForChainRun]): + Manager for chain run callbacks. + + Returns: + Dict[str, Any]: Output dictionary containing + the 'response' from the service. + """ + if not url: + return { + "message": "No input provided", + } + response = self.store_data( + url=url, + document_name=document_name, + document_description=document_description, + ) + return response + + def store_data( + self, + url: Optional[str], + document_name: Optional[str], + document_description: Optional[str], + ) -> dict: + """ + Store data using the Cogniswitch service. + This calls the CogniSwitch services to analyze & store data from a url. + the URL is provided in input, assign that value to the url key. + Assign document name & description only if provided in input. + + Args: + url (Optional[str]): URL link. 
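[Editor's note: `store_data` above builds the multipart upload with `files = {"file": open(file, "rb")}`, which leaves the file handle open after the request returns. A hedged sketch of the same request with a context-managed handle; the endpoint and header names are copied from the code above, the tokens are placeholders.]

```python
import requests

KNOWLEDGE_SOURCE_FILE_URL = (
    "https://api.cogniswitch.ai:8243/cs-api/0.0.1/cs/knowledgeSource/file"
)


def upload_file(path: str, api_key: str, openai_token: str, platform_token: str) -> dict:
    headers = {
        "apiKey": api_key,
        "openAIToken": openai_token,
        "platformToken": platform_token,
    }
    data = {"documentName": "", "documentDescription": ""}
    # The handle is closed as soon as the request finishes.
    with open(path, "rb") as fh:
        response = requests.post(
            KNOWLEDGE_SOURCE_FILE_URL,
            headers=headers,
            data=data,
            files={"file": fh},
            verify=False,  # mirrors the PR; enabling TLS verification is preferable in production
        )
    return response.json() if response.status_code == 200 else {"message": "Bad Request"}
```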
+ document_name (Optional[str]): Name of the document you are uploading. + document_description (Optional[str]): Description of the document. + + Returns: + dict: Response JSON from the Cogniswitch service. + """ + headers = { + "apiKey": self.apiKey, + "openAIToken": self.OAI_token, + "platformToken": self.cs_token, + } + data: Dict[str, Any] + if not document_name: + document_name = "" + if not document_description: + document_description = "" + if not url: + return { + "message": "No input provided", + } + else: + data = {"url": url} + response = requests.post( + self.knowledgesource_url, + headers=headers, + verify=False, + data=data, + ) + if response.status_code == 200: + return response.json() + else: + return {"message": "Bad Request"} diff --git a/libs/community/langchain_community/tools/sql_database/tool.py b/libs/community/langchain_community/tools/sql_database/tool.py index 4c7fe4726a055..e68923bdcfbd3 100644 --- a/libs/community/langchain_community/tools/sql_database/tool.py +++ b/libs/community/langchain_community/tools/sql_database/tool.py @@ -2,7 +2,7 @@ """Tools for interacting with a SQL database.""" from typing import Any, Dict, Optional, Sequence, Type, Union -from sqlalchemy import Result +from sqlalchemy.engine import Result from langchain_core.pydantic_v1 import BaseModel, Field, root_validator diff --git a/libs/community/langchain_community/tools/vectorstore/tool.py b/libs/community/langchain_community/tools/vectorstore/tool.py index e51deaeaa1dec..c0cbdb670627f 100644 --- a/libs/community/langchain_community/tools/vectorstore/tool.py +++ b/libs/community/langchain_community/tools/vectorstore/tool.py @@ -3,7 +3,10 @@ import json from typing import Any, Dict, Optional -from langchain_core.callbacks import CallbackManagerForToolRun +from langchain_core.callbacks import ( + AsyncCallbackManagerForToolRun, + CallbackManagerForToolRun, +) from langchain_core.language_models import BaseLanguageModel from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.tools import BaseTool @@ -51,9 +54,30 @@ def _run( chain = RetrievalQA.from_chain_type( self.llm, retriever=self.vectorstore.as_retriever() ) - return chain.run( - query, callbacks=run_manager.get_child() if run_manager else None + return chain.invoke( + {chain.input_key: query}, + config={"callbacks": [run_manager.get_child() if run_manager else None]}, + )[chain.output_key] + + async def _arun( + self, + query: str, + run_manager: Optional[AsyncCallbackManagerForToolRun] = None, + ) -> str: + """Use the tool asynchronously.""" + from langchain.chains.retrieval_qa.base import RetrievalQA + + chain = RetrievalQA.from_chain_type( + self.llm, retriever=self.vectorstore.as_retriever() ) + return ( + await chain.ainvoke( + {chain.input_key: query}, + config={ + "callbacks": [run_manager.get_child() if run_manager else None] + }, + ) + )[chain.output_key] class VectorStoreQAWithSourcesTool(BaseVectorStoreTool, BaseTool): @@ -87,7 +111,28 @@ def _run( self.llm, retriever=self.vectorstore.as_retriever() ) return json.dumps( - chain( + chain.invoke( + {chain.question_key: query}, + return_only_outputs=True, + callbacks=run_manager.get_child() if run_manager else None, + ) + ) + + async def _arun( + self, + query: str, + run_manager: Optional[AsyncCallbackManagerForToolRun] = None, + ) -> str: + """Use the tool asynchronously.""" + from langchain.chains.qa_with_sources.retrieval import ( + RetrievalQAWithSourcesChain, + ) + + chain = RetrievalQAWithSourcesChain.from_chain_type( + self.llm, 
retriever=self.vectorstore.as_retriever() + ) + return json.dumps( + await chain.ainvoke( {chain.question_key: query}, return_only_outputs=True, callbacks=run_manager.get_child() if run_manager else None, diff --git a/libs/community/langchain_community/utilities/astradb.py b/libs/community/langchain_community/utilities/astradb.py index 3ad3d3274974d..c113d660792b6 100644 --- a/libs/community/langchain_community/utilities/astradb.py +++ b/libs/community/langchain_community/utilities/astradb.py @@ -1,6 +1,10 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Optional +import asyncio +import inspect +from asyncio import InvalidStateError, Task +from enum import Enum +from typing import TYPE_CHECKING, Awaitable, Optional, Union if TYPE_CHECKING: from astrapy.db import ( @@ -9,7 +13,13 @@ ) -class AstraDBEnvironment: +class SetupMode(Enum): + SYNC = 1 + ASYNC = 2 + OFF = 3 + + +class _AstraDBEnvironment: def __init__( self, token: Optional[str] = None, @@ -21,21 +31,20 @@ def __init__( self.token = token self.api_endpoint = api_endpoint astra_db = astra_db_client - self.async_astra_db = async_astra_db_client + async_astra_db = async_astra_db_client self.namespace = namespace - from astrapy import db - try: - from astrapy.db import AstraDB + from astrapy.db import ( + AstraDB, + AsyncAstraDB, + ) except (ImportError, ModuleNotFoundError): raise ImportError( "Could not import a recent astrapy python package. " "Please install it with `pip install --upgrade astrapy`." ) - supports_async = hasattr(db, "AsyncAstraDB") - # Conflicting-arg checks: if astra_db_client is not None or async_astra_db_client is not None: if token is not None or api_endpoint is not None: @@ -46,39 +55,115 @@ def __init__( if token and api_endpoint: astra_db = AstraDB( - token=self.token, - api_endpoint=self.api_endpoint, + token=token, + api_endpoint=api_endpoint, + namespace=self.namespace, + ) + async_astra_db = AsyncAstraDB( + token=token, + api_endpoint=api_endpoint, namespace=self.namespace, ) - if supports_async: - self.async_astra_db = db.AsyncAstraDB( - token=self.token, - api_endpoint=self.api_endpoint, - namespace=self.namespace, - ) if astra_db: self.astra_db = astra_db + if async_astra_db: + self.async_astra_db = async_astra_db + else: + self.async_astra_db = AsyncAstraDB( + token=self.astra_db.token, + api_endpoint=self.astra_db.base_url, + api_path=self.astra_db.api_path, + api_version=self.astra_db.api_version, + namespace=self.astra_db.namespace, + ) + elif async_astra_db: + self.async_astra_db = async_astra_db + self.astra_db = AstraDB( + token=self.async_astra_db.token, + api_endpoint=self.async_astra_db.base_url, + api_path=self.async_astra_db.api_path, + api_version=self.async_astra_db.api_version, + namespace=self.async_astra_db.namespace, + ) else: - if self.async_astra_db: - self.astra_db = AstraDB( - token=self.async_astra_db.token, - api_endpoint=self.async_astra_db.base_url, - api_path=self.async_astra_db.api_path, - api_version=self.async_astra_db.api_version, - namespace=self.async_astra_db.namespace, + raise ValueError( + "Must provide 'astra_db_client' or 'async_astra_db_client' or " + "'token' and 'api_endpoint'" + ) + + +class _AstraDBCollectionEnvironment(_AstraDBEnvironment): + def __init__( + self, + collection_name: str, + token: Optional[str] = None, + api_endpoint: Optional[str] = None, + astra_db_client: Optional[AstraDB] = None, + async_astra_db_client: Optional[AsyncAstraDB] = None, + namespace: Optional[str] = None, + setup_mode: SetupMode = SetupMode.SYNC, + 
pre_delete_collection: bool = False, + embedding_dimension: Union[int, Awaitable[int], None] = None, + metric: Optional[str] = None, + ) -> None: + from astrapy.db import AstraDBCollection, AsyncAstraDBCollection + + super().__init__( + token, api_endpoint, astra_db_client, async_astra_db_client, namespace + ) + self.collection_name = collection_name + self.collection = AstraDBCollection( + collection_name=collection_name, + astra_db=self.astra_db, + ) + + self.async_collection = AsyncAstraDBCollection( + collection_name=collection_name, + astra_db=self.async_astra_db, + ) + + self.async_setup_db_task: Optional[Task] = None + if setup_mode == SetupMode.ASYNC: + async_astra_db = self.async_astra_db + + async def _setup_db() -> None: + if pre_delete_collection: + await async_astra_db.delete_collection(collection_name) + if inspect.isawaitable(embedding_dimension): + dimension = await embedding_dimension + else: + dimension = embedding_dimension + await async_astra_db.create_collection( + collection_name, dimension=dimension, metric=metric ) - else: + + self.async_setup_db_task = asyncio.create_task(_setup_db()) + elif setup_mode == SetupMode.SYNC: + if pre_delete_collection: + self.astra_db.delete_collection(collection_name) + if inspect.isawaitable(embedding_dimension): raise ValueError( - "Must provide 'astra_db_client' or 'async_astra_db_client' or " - "'token' and 'api_endpoint'" + "Cannot use an awaitable embedding_dimension with async_setup " + "set to False" ) - - if not self.async_astra_db and self.astra_db and supports_async: - self.async_astra_db = db.AsyncAstraDB( - token=self.astra_db.token, - api_endpoint=self.astra_db.base_url, - api_path=self.astra_db.api_path, - api_version=self.astra_db.api_version, - namespace=self.astra_db.namespace, + self.astra_db.create_collection( + collection_name, + dimension=embedding_dimension, # type: ignore[arg-type] + metric=metric, ) + + def ensure_db_setup(self) -> None: + if self.async_setup_db_task: + try: + self.async_setup_db_task.result() + except InvalidStateError: + raise ValueError( + "Asynchronous setup of the DB not finished. " + "NB: AstraDB components sync methods shouldn't be called from the " + "event loop. Consider using their async equivalents." 
+ ) + + async def aensure_db_setup(self) -> None: + if self.async_setup_db_task: + await self.async_setup_db_task diff --git a/libs/community/langchain_community/utilities/github.py b/libs/community/langchain_community/utilities/github.py index 6c2950dcc99b7..410fdb494b7c3 100644 --- a/libs/community/langchain_community/utilities/github.py +++ b/libs/community/langchain_community/utilities/github.py @@ -735,7 +735,7 @@ def search_issues_and_prs(self, query: str) -> str: str: A string containing the first 5 issues and pull requests """ search_result = self.github.search_issues(query, repo=self.github_repository) - max_items = min(5, len(search_result)) + max_items = min(5, search_result.totalCount) results = [f"Top {max_items} results:"] for issue in search_result[:max_items]: results.append( diff --git a/libs/community/langchain_community/utilities/graphql.py b/libs/community/langchain_community/utilities/graphql.py index 87be94d09c362..a576419e5be07 100644 --- a/libs/community/langchain_community/utilities/graphql.py +++ b/libs/community/langchain_community/utilities/graphql.py @@ -37,7 +37,10 @@ def validate_environment(cls, values: Dict) -> Dict: url=values["graphql_endpoint"], headers=headers, ) - client = Client(transport=transport, fetch_schema_from_transport=True) + fetch_schema_from_transport = values.get("fetch_schema_from_transport", True) + client = Client( + transport=transport, fetch_schema_from_transport=fetch_schema_from_transport + ) values["gql_client"] = client values["gql_function"] = gql return values diff --git a/libs/community/langchain_community/utilities/pebblo.py b/libs/community/langchain_community/utilities/pebblo.py new file mode 100644 index 0000000000000..a52d3c45b4613 --- /dev/null +++ b/libs/community/langchain_community/utilities/pebblo.py @@ -0,0 +1,249 @@ +from __future__ import annotations + +import logging +import os +import pathlib +import platform +from typing import Optional, Tuple + +from langchain_core.env import get_runtime_environment +from langchain_core.pydantic_v1 import BaseModel + +from langchain_community.document_loaders.base import BaseLoader + +logger = logging.getLogger(__name__) + +PLUGIN_VERSION = "0.1.0" +CLASSIFIER_URL = os.getenv("PEBBLO_CLASSIFIER_URL", "http://localhost:8000") + +# Supported loaders for Pebblo safe data loading +file_loader = [ + "JSONLoader", + "S3FileLoader", + "UnstructuredMarkdownLoader", + "UnstructuredPDFLoader", + "UnstructuredFileLoader", + "UnstructuredJsonLoader", + "PyPDFLoader", + "GCSFileLoader", + "AmazonTextractPDFLoader", + "CSVLoader", + "UnstructuredExcelLoader", +] +dir_loader = ["DirectoryLoader", "S3DirLoader", "PyPDFDirectoryLoader"] +in_memory = ["DataFrameLoader"] + +LOADER_TYPE_MAPPING = {"file": file_loader, "dir": dir_loader, "in-memory": in_memory} + +SUPPORTED_LOADERS = (*file_loader, *dir_loader, *in_memory) + +logger = logging.getLogger(__name__) + + +class Runtime(BaseModel): + """This class represents a Runtime. + + Args: + type (Optional[str]): Runtime type. Defaults to "" + host (str): Hostname of runtime. + path (str): Current working directory path. + ip (Optional[str]): Ip of current runtime. Defaults to "" + platform (str): Platform details of current runtime. + os (str): OS name. + os_version (str): OS version. + language (str): Runtime kernel. + language_version (str): version of current runtime kernel. + runtime (Optional[str]) More runtime details. 
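[Editor's note: the `SetupMode.ASYNC` path above schedules collection creation as an `asyncio.Task` at construction time; sync entry points then call `ensure_db_setup()`, which raises if the task has not finished, while async entry points simply await it. A stripped-down sketch of that guard, independent of Astra; every name below is illustrative.]

```python
import asyncio
from typing import Optional


class DeferredSetup:
    def __init__(self) -> None:
        # Scheduled on the running loop; runs concurrently with other awaits.
        self._task: Optional[asyncio.Task] = asyncio.create_task(self._setup())
        self.ready = False

    async def _setup(self) -> None:
        await asyncio.sleep(0.1)  # stands in for create_collection()
        self.ready = True

    def ensure_setup(self) -> None:
        # Sync callers can only check the task; blocking the event loop is not an option.
        try:
            self._task.result()
        except asyncio.InvalidStateError:
            raise ValueError("Async setup not finished; use the async API instead.")

    async def aensure_setup(self) -> None:
        await self._task


async def main() -> None:
    env = DeferredSetup()
    await env.aensure_setup()  # async callers just await the setup task
    env.ensure_setup()         # now succeeds: the task already has a result
    print("ready:", env.ready)


asyncio.run(main())
```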
Defaults to "" + """ + + type: str = "local" + host: str + path: str + ip: Optional[str] = "" + platform: str + os: str + os_version: str + language: str + language_version: str + runtime: str = "local" + + +class Framework(BaseModel): + """This class represents a Framework instance. + + Args: + name (str): Name of the Framework. + version (str): Version of the Framework. + """ + + name: str + version: str + + +class App(BaseModel): + """This class represents an AI application. + + Args: + name (str): Name of the app. + owner (str): Owner of the app. + description (Optional[str]): Description of the app. + load_id (str): Unique load_id of the app instance. + runtime (Runtime): Runtime details of app. + framework (Framework): Framework details of the app + plugin_version (str): Plugin version used for the app. + """ + + name: str + owner: str + description: Optional[str] + load_id: str + runtime: Runtime + framework: Framework + plugin_version: str + + +class Doc(BaseModel): + """This class represents a pebblo document. + + Args: + name (str): Name of app originating this document. + owner (str): Owner of app. + docs (list): List of documents with its metadata. + plugin_version (str): Pebblo plugin Version + load_id (str): Unique load_id of the app instance. + loader_details (dict): Loader details with its metadata. + loading_end (bool): Boolean, specifying end of loading of source. + source_owner (str): Owner of the source of the loader. + """ + + name: str + owner: str + docs: list + plugin_version: str + load_id: str + loader_details: dict + loading_end: bool + source_owner: str + + +def get_full_path(path: str) -> str: + """Return absolute local path for a local file/directory, + for network related path, return as is. + + Args: + path (str): Relative path to be resolved. + + Returns: + str: Resolved absolute path. + """ + if ( + not path + or ("://" in path) + or ("/" == path[0]) + or (path in ["unknown", "-", "in-memory"]) + ): + return path + full_path = pathlib.Path(path).resolve() + return str(full_path) + + +def get_loader_type(loader: str) -> str: + """Return loader type among, file, dir or in-memory. + + Args: + loader (str): Name of the loader, whose type is to be resolved. + + Returns: + str: One of the loader type among, file/dir/in-memory. + """ + for loader_type, loaders in LOADER_TYPE_MAPPING.items(): + if loader in loaders: + return loader_type + return "unknown" + + +def get_loader_full_path(loader: BaseLoader) -> str: + """Return absolute source path of source of loader based on the + keys present in Document object from loader. + + Args: + loader (BaseLoader): Langchain document loader, derived from Baseloader. + """ + from langchain_community.document_loaders import ( + DataFrameLoader, + GCSFileLoader, + S3FileLoader, + ) + + location = "-" + if not isinstance(loader, BaseLoader): + logger.error( + "loader is not derived from BaseLoader, source location will be unknown!" 
+ ) + return location + loader_dict = loader.__dict__ + try: + if "bucket" in loader_dict: + if isinstance(loader, GCSFileLoader): + location = f"gc://{loader.bucket}/{loader.blob}" + elif isinstance(loader, S3FileLoader): + location = f"s3://{loader.bucket}/{loader.key}" + elif "path" in loader_dict: + location = loader_dict["path"] + elif "file_path" in loader_dict: + location = loader_dict["file_path"] + elif "web_paths" in loader_dict: + location = loader_dict["web_paths"][0] + # For in-memory types: + elif isinstance(loader, DataFrameLoader): + location = "in-memory" + except Exception: + pass + return get_full_path(str(location)) + + +def get_runtime() -> Tuple[Framework, Runtime]: + """Fetch the current Framework and Runtime details. + + Returns: + Tuple[Framework, Runtime]: Framework and Runtime for the current app instance. + """ + runtime_env = get_runtime_environment() + framework = Framework( + name="langchain", version=runtime_env.get("library_version", None) + ) + uname = platform.uname() + runtime = Runtime( + host=uname.node, + path=os.environ["PWD"], + platform=runtime_env.get("platform", "unknown"), + os=uname.system, + os_version=uname.version, + ip=get_ip(), + language=runtime_env.get("runtime", "unknown"), + language_version=runtime_env.get("runtime_version", "unknown"), + ) + + if "Darwin" in runtime.os: + runtime.type = "desktop" + runtime.runtime = "Mac OSX" + + logger.debug(f"framework {framework}") + logger.debug(f"runtime {runtime}") + return framework, runtime + + +def get_ip() -> str: + """Fetch local runtime ip address + + Returns: + str: IP address + """ + import socket # lazy imports + + host = socket.gethostname() + try: + public_ip = socket.gethostbyname(host) + except Exception: + public_ip = socket.gethostbyname("localhost") + return public_ip diff --git a/libs/community/langchain_community/utilities/sql_database.py b/libs/community/langchain_community/utilities/sql_database.py index 087ac44bc112f..fb542cfa88392 100644 --- a/libs/community/langchain_community/utilities/sql_database.py +++ b/libs/community/langchain_community/utilities/sql_database.py @@ -7,18 +7,17 @@ from langchain_core._api import deprecated from langchain_core.utils import get_from_env from sqlalchemy import ( - Executable, MetaData, - Result, Table, create_engine, inspect, select, text, ) -from sqlalchemy.engine import Engine +from sqlalchemy.engine import Engine, Result from sqlalchemy.exc import ProgrammingError, SQLAlchemyError from sqlalchemy.schema import CreateTable +from sqlalchemy.sql.expression import Executable from sqlalchemy.types import NullType diff --git a/libs/community/langchain_community/utils/google.py b/libs/community/langchain_community/utils/google.py new file mode 100644 index 0000000000000..4e68512296b30 --- /dev/null +++ b/libs/community/langchain_community/utils/google.py @@ -0,0 +1,25 @@ +"""Utilities to use Google provided components.""" + +from importlib import metadata +from typing import Any, Optional + + +def get_client_info(module: Optional[str] = None) -> Any: + r"""Returns a custom user agent header. + + Args: + module (Optional[str]): + Optional. The module for a custom user agent header. 
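[Editor's note: `get_runtime()` and `get_ip()` above assemble host, OS and IP details from the standard library. A minimal standalone version of that probing, without the Pebblo pydantic models; `probe_runtime` is an illustrative name.]

```python
import os
import platform
import socket


def probe_runtime() -> dict:
    uname = platform.uname()
    host = socket.gethostname()
    try:
        ip = socket.gethostbyname(host)
    except OSError:
        ip = socket.gethostbyname("localhost")
    return {
        "host": uname.node,
        "os": uname.system,
        "os_version": uname.version,
        "path": os.getcwd(),  # the PR reads os.environ["PWD"] here instead
        "ip": ip,
    }


print(probe_runtime())
```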
+ Returns: + google.api_core.gapic_v1.client_info.ClientInfo + """ + from google.api_core.gapic_v1.client_info import ClientInfo + + langchain_version = metadata.version("langchain") + client_library_version = ( + f"{langchain_version}-{module}" if module else langchain_version + ) + return ClientInfo( + client_library_version=client_library_version, + user_agent=f"langchain/{client_library_version}", + ) diff --git a/libs/community/langchain_community/vectorstores/__init__.py b/libs/community/langchain_community/vectorstores/__init__.py index 61b573bb64952..da412557f3753 100644 --- a/libs/community/langchain_community/vectorstores/__init__.py +++ b/libs/community/langchain_community/vectorstores/__init__.py @@ -74,6 +74,12 @@ def _import_annoy() -> Any: return Annoy +def _import_apache_doris() -> Any: + from langchain_community.vectorstores.apache_doris import ApacheDoris + + return ApacheDoris + + def _import_atlas() -> Any: from langchain_community.vectorstores.atlas import AtlasDB @@ -497,6 +503,8 @@ def __getattr__(name: str) -> Any: return _import_elastic_vector_search() elif name == "Annoy": return _import_annoy() + elif name == "ApacheDoris": + return _import_apache_doris() elif name == "AtlasDB": return _import_atlas() elif name == "AwaDB": @@ -640,6 +648,7 @@ def __getattr__(name: str) -> Any: "AlibabaCloudOpenSearchSettings", "AnalyticDB", "Annoy", + "ApacheDoris", "AtlasDB", "AwaDB", "AzureSearch", diff --git a/libs/community/langchain_community/vectorstores/apache_doris.py b/libs/community/langchain_community/vectorstores/apache_doris.py new file mode 100644 index 0000000000000..12e9b58304f29 --- /dev/null +++ b/libs/community/langchain_community/vectorstores/apache_doris.py @@ -0,0 +1,480 @@ +from __future__ import annotations + +import json +import logging +from hashlib import sha1 +from threading import Thread +from typing import Any, Dict, Iterable, List, Optional, Tuple + +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.pydantic_v1 import BaseSettings +from langchain_core.vectorstores import VectorStore + +logger = logging.getLogger() +DEBUG = False + + +class ApacheDorisSettings(BaseSettings): + """Apache Doris client configuration. + + Attributes: + apache_doris_host (str) : An URL to connect to frontend. + Defaults to 'localhost'. + apache_doris_port (int) : URL port to connect with HTTP. Defaults to 9030. + username (str) : Username to login. Defaults to 'root'. + password (str) : Password to login. Defaults to None. + database (str) : Database name to find the table. Defaults to 'default'. + table (str) : Table name to operate on. + Defaults to 'langchain'. + + column_map (Dict) : Column type map to project column name onto langchain + semantics. Must have keys: `text`, `id`, `vector`, + must be same size to number of columns. For example: + .. code-block:: python + + { + 'id': 'text_id', + 'embedding': 'text_embedding', + 'document': 'text_plain', + 'metadata': 'metadata_dictionary_in_json', + } + + Defaults to identity map. 
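[Editor's note: `ApacheDorisSettings` above is a pydantic `BaseSettings` with the `apache_doris_` env prefix, so connection details can come either from environment variables or from keyword arguments. A hedged sketch; the host and credentials are placeholders.]

```python
import os

from langchain_community.vectorstores.apache_doris import ApacheDorisSettings

# Option 1: environment variables picked up through the "apache_doris_" prefix.
os.environ["APACHE_DORIS_HOST"] = "doris-fe.internal"  # placeholder FE host
os.environ["APACHE_DORIS_PASSWORD"] = "secret"         # placeholder

# Option 2: explicit keyword arguments, which take priority over the environment.
settings = ApacheDorisSettings(
    port=9030,
    username="root",
    database="default",
    table="langchain",
)

print(settings.host, settings.port, settings.table)
```

The resulting settings object is what the store constructor that follows accepts through its `config=` parameter.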
+ """ + + host: str = "localhost" + port: int = 9030 + username: str = "root" + password: str = "" + + column_map: Dict[str, str] = { + "id": "id", + "document": "document", + "embedding": "embedding", + "metadata": "metadata", + } + + database: str = "default" + table: str = "langchain" + + def __getitem__(self, item: str) -> Any: + return getattr(self, item) + + class Config: + env_file = ".env" + env_prefix = "apache_doris_" + env_file_encoding = "utf-8" + + +class ApacheDoris(VectorStore): + """`Apache Doris` vector store. + + You need a `pymysql` python package, and a valid account + to connect to Apache Doris. + + For more information, please visit + [Apache Doris official site](https://doris.apache.org/) + [Apache Doris github](https://github.com/apache/doris) + """ + + def __init__( + self, + embedding: Embeddings, + *, + config: Optional[ApacheDorisSettings] = None, + **kwargs: Any, + ) -> None: + """Constructor for Apache Doris. + + Args: + embedding (Embeddings): Text embedding model. + config (ApacheDorisSettings): Apache Doris client configuration information. + """ + try: + import pymysql # type: ignore[import] + except ImportError: + raise ImportError( + "Could not import pymysql python package. " + "Please install it with `pip install pymysql`." + ) + try: + from tqdm import tqdm + + self.pgbar = tqdm + except ImportError: + # Just in case if tqdm is not installed + self.pgbar = lambda x, **kwargs: x + super().__init__() + if config is not None: + self.config = config + else: + self.config = ApacheDorisSettings() + assert self.config + assert self.config.host and self.config.port + assert self.config.column_map and self.config.database and self.config.table + for k in ["id", "embedding", "document", "metadata"]: + assert k in self.config.column_map + + # initialize the schema + dim = len(embedding.embed_query("test")) + + self.schema = f"""\ +CREATE TABLE IF NOT EXISTS {self.config.database}.{self.config.table}( + {self.config.column_map['id']} varchar(50), + {self.config.column_map['document']} string, + {self.config.column_map['embedding']} array, + {self.config.column_map['metadata']} string +) ENGINE = OLAP UNIQUE KEY(id) DISTRIBUTED BY HASH(id) \ + PROPERTIES ("replication_allocation" = "tag.location.default: 1")\ +""" + self.dim = dim + self.BS = "\\" + self.must_escape = ("\\", "'") + self._embedding = embedding + self.dist_order = "DESC" + _debug_output(self.config) + + # Create a connection to Apache Doris + self.connection = pymysql.connect( + host=self.config.host, + port=self.config.port, + user=self.config.username, + password=self.config.password, + database=self.config.database, + **kwargs, + ) + + _debug_output(self.schema) + _get_named_result(self.connection, self.schema) + + def escape_str(self, value: str) -> str: + return "".join(f"{self.BS}{c}" if c in self.must_escape else c for c in value) + + @property + def embeddings(self) -> Embeddings: + return self._embedding + + def _build_insert_sql(self, transac: Iterable, column_names: Iterable[str]) -> str: + ks = ",".join(column_names) + embed_tuple_index = tuple(column_names).index( + self.config.column_map["embedding"] + ) + _data = [] + for n in transac: + n = ",".join( + [ + ( + f"'{self.escape_str(str(_n))}'" + if idx != embed_tuple_index + else f"array{str(_n)}" + ) + for (idx, _n) in enumerate(n) + ] + ) + _data.append(f"({n})") + i_str = f""" + INSERT INTO + {self.config.database}.{self.config.table}({ks}) + VALUES + {','.join(_data)} + """ + return i_str + + def _insert(self, transac: Iterable, 
column_names: Iterable[str]) -> None: + _insert_query = self._build_insert_sql(transac, column_names) + _debug_output(_insert_query) + _get_named_result(self.connection, _insert_query) + + def add_texts( + self, + texts: Iterable[str], + metadatas: Optional[List[dict]] = None, + batch_size: int = 32, + ids: Optional[Iterable[str]] = None, + **kwargs: Any, + ) -> List[str]: + """Insert more texts through the embeddings and add to the VectorStore. + + Args: + texts: Iterable of strings to add to the VectorStore. + ids: Optional list of ids to associate with the texts. + batch_size: Batch size of insertion + metadata: Optional column data to be inserted + + Returns: + List of ids from adding the texts into the VectorStore. + + """ + # Embed and create the documents + ids = ids or [sha1(t.encode("utf-8")).hexdigest() for t in texts] + colmap_ = self.config.column_map + transac = [] + column_names = { + colmap_["id"]: ids, + colmap_["document"]: texts, + colmap_["embedding"]: self._embedding.embed_documents(list(texts)), + } + metadatas = metadatas or [{} for _ in texts] + column_names[colmap_["metadata"]] = map(json.dumps, metadatas) + assert len(set(colmap_) - set(column_names)) >= 0 + keys, values = zip(*column_names.items()) + try: + t = None + for v in self.pgbar( + zip(*values), desc="Inserting data...", total=len(metadatas) + ): + assert ( + len(v[keys.index(self.config.column_map["embedding"])]) == self.dim + ) + transac.append(v) + if len(transac) == batch_size: + if t: + t.join() + t = Thread(target=self._insert, args=[transac, keys]) + t.start() + transac = [] + if len(transac) > 0: + if t: + t.join() + self._insert(transac, keys) + return [i for i in ids] + except Exception as e: + logger.error(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m") + return [] + + @classmethod + def from_texts( + cls, + texts: List[str], + embedding: Embeddings, + metadatas: Optional[List[Dict[Any, Any]]] = None, + config: Optional[ApacheDorisSettings] = None, + text_ids: Optional[Iterable[str]] = None, + batch_size: int = 32, + **kwargs: Any, + ) -> ApacheDoris: + """Create Apache Doris wrapper with existing texts + + Args: + embedding_function (Embeddings): Function to extract text embedding + texts (Iterable[str]): List or tuple of strings to be added + config (ApacheDorisSettings, Optional): Apache Doris configuration + text_ids (Optional[Iterable], optional): IDs for the texts. + Defaults to None. + batch_size (int, optional): BatchSize when transmitting data to Apache + Doris. Defaults to 32. + metadata (List[dict], optional): metadata to texts. Defaults to None. + Returns: + Apache Doris Index + """ + ctx = cls(embedding, config=config, **kwargs) + ctx.add_texts(texts, ids=text_ids, batch_size=batch_size, metadatas=metadatas) + return ctx + + def __repr__(self) -> str: + """Text representation for Apache Doris Vector Store, prints frontends, username + and schemas. 
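[Editor's note: `from_texts` above embeds the inputs, batches the inserts on a background thread and returns the generated ids. A hedged end-to-end sketch against a reachable Doris frontend; the connection details are placeholders, the toy embedding stands in for a real model, and the `similarity_search` call relies on the query helpers defined just below.]

```python
from typing import List

from langchain_core.embeddings import Embeddings

from langchain_community.vectorstores.apache_doris import (
    ApacheDoris,
    ApacheDorisSettings,
)


class ToyEmbeddings(Embeddings):
    """Fixed-size deterministic vectors; replace with a real embedding model."""

    def embed_documents(self, texts: List[str]) -> List[List[float]]:
        return [self.embed_query(t) for t in texts]

    def embed_query(self, text: str) -> List[float]:
        return [float(len(text) % 7), float(len(text) % 5), 1.0]


settings = ApacheDorisSettings(host="localhost", port=9030, username="root")  # placeholders

store = ApacheDoris.from_texts(
    texts=["doris is an olap database", "langchain stores vectors in it"],
    embedding=ToyEmbeddings(),
    config=settings,
)

docs = store.similarity_search("olap", k=1)
print(docs[0].page_content)
```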
Easy to use with `str(ApacheDoris())` + + Returns: + repr: string to show connection info and data schema + """ + _repr = f"\033[92m\033[1m{self.config.database}.{self.config.table} @ " + _repr += f"{self.config.host}:{self.config.port}\033[0m\n\n" + _repr += f"\033[1musername: {self.config.username}\033[0m\n\nTable Schema:\n" + width = 25 + fields = 3 + _repr += "-" * (width * fields + 1) + "\n" + columns = ["name", "type", "key"] + _repr += f"|\033[94m{columns[0]:24s}\033[0m|\033[96m{columns[1]:24s}" + _repr += f"\033[0m|\033[96m{columns[2]:24s}\033[0m|\n" + _repr += "-" * (width * fields + 1) + "\n" + q_str = f"DESC {self.config.database}.{self.config.table}" + _debug_output(q_str) + rs = _get_named_result(self.connection, q_str) + for r in rs: + _repr += f"|\033[94m{r['Field']:24s}\033[0m|\033[96m{r['Type']:24s}" + _repr += f"\033[0m|\033[96m{r['Key']:24s}\033[0m|\n" + _repr += "-" * (width * fields + 1) + "\n" + return _repr + + def _build_query_sql( + self, q_emb: List[float], topk: int, where_str: Optional[str] = None + ) -> str: + q_emb_str = ",".join(map(str, q_emb)) + if where_str: + where_str = f"WHERE {where_str}" + else: + where_str = "" + + q_str = f""" + SELECT {self.config.column_map['document']}, + {self.config.column_map['metadata']}, + cosine_distance(array[{q_emb_str}], + {self.config.column_map['embedding']}) as dist + FROM {self.config.database}.{self.config.table} + {where_str} + ORDER BY dist {self.dist_order} + LIMIT {topk} + """ + + _debug_output(q_str) + return q_str + + def similarity_search( + self, query: str, k: int = 4, where_str: Optional[str] = None, **kwargs: Any + ) -> List[Document]: + """Perform a similarity search with Apache Doris + + Args: + query (str): query string + k (int, optional): Top K neighbors to retrieve. Defaults to 4. + where_str (Optional[str], optional): where condition string. + Defaults to None. + + NOTE: Please do not let end-user to fill this and always be aware + of SQL injection. When dealing with metadatas, remember to + use `{self.metadata_column}.attribute` instead of `attribute` + alone. The default name for it is `metadata`. + + Returns: + List[Document]: List of Documents + """ + return self.similarity_search_by_vector( + self._embedding.embed_query(query), k, where_str, **kwargs + ) + + def similarity_search_by_vector( + self, + embedding: List[float], + k: int = 4, + where_str: Optional[str] = None, + **kwargs: Any, + ) -> List[Document]: + """Perform a similarity search with Apache Doris by vectors + + Args: + query (str): query string + k (int, optional): Top K neighbors to retrieve. Defaults to 4. + where_str (Optional[str], optional): where condition string. + Defaults to None. + + NOTE: Please do not let end-user to fill this and always be aware + of SQL injection. When dealing with metadatas, remember to + use `{self.metadata_column}.attribute` instead of `attribute` + alone. The default name for it is `metadata`. 
+ + Returns: + List[Document]: List of (Document, similarity) + """ + q_str = self._build_query_sql(embedding, k, where_str) + try: + return [ + Document( + page_content=r[self.config.column_map["document"]], + metadata=json.loads(r[self.config.column_map["metadata"]]), + ) + for r in _get_named_result(self.connection, q_str) + ] + except Exception as e: + logger.error(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m") + return [] + + def similarity_search_with_relevance_scores( + self, query: str, k: int = 4, where_str: Optional[str] = None, **kwargs: Any + ) -> List[Tuple[Document, float]]: + """Perform a similarity search with Apache Doris + + Args: + query (str): query string + k (int, optional): Top K neighbors to retrieve. Defaults to 4. + where_str (Optional[str], optional): where condition string. + Defaults to None. + + NOTE: Please do not let end-user to fill this and always be aware + of SQL injection. When dealing with metadatas, remember to + use `{self.metadata_column}.attribute` instead of `attribute` + alone. The default name for it is `metadata`. + + Returns: + List[Document]: List of documents + """ + q_str = self._build_query_sql(self._embedding.embed_query(query), k, where_str) + try: + return [ + ( + Document( + page_content=r[self.config.column_map["document"]], + metadata=json.loads(r[self.config.column_map["metadata"]]), + ), + r["dist"], + ) + for r in _get_named_result(self.connection, q_str) + ] + except Exception as e: + logger.error(f"\033[91m\033[1m{type(e)}\033[0m \033[95m{str(e)}\033[0m") + return [] + + def drop(self) -> None: + """ + Helper function: Drop data + """ + _get_named_result( + self.connection, + f"DROP TABLE IF EXISTS {self.config.database}.{self.config.table}", + ) + + @property + def metadata_column(self) -> str: + return self.config.column_map["metadata"] + + +def _has_mul_sub_str(s: str, *args: Any) -> bool: + """Check if a string has multiple substrings. + + Args: + s: The string to check + *args: The substrings to check for in the string + + Returns: + bool: True if all substrings are present in the string, False otherwise + """ + for a in args: + if a not in s: + return False + return True + + +def _debug_output(s: Any) -> None: + """Print a debug message if DEBUG is True. + + Args: + s: The message to print + """ + if DEBUG: + print(s) # noqa: T201 + + +def _get_named_result(connection: Any, query: str) -> List[dict[str, Any]]: + """Get a named result from a query. 
+ + Args: + connection: The connection to the database + query: The query to execute + + Returns: + List[dict[str, Any]]: The result of the query + """ + cursor = connection.cursor() + cursor.execute(query) + columns = cursor.description + result = [] + for value in cursor.fetchall(): + r = {} + for idx, datum in enumerate(value): + k = columns[idx][0] + r[k] = datum + result.append(r) + _debug_output(result) + cursor.close() + return result diff --git a/libs/community/langchain_community/vectorstores/astradb.py b/libs/community/langchain_community/vectorstores/astradb.py index e6d1a5e010072..67751e4410c54 100644 --- a/libs/community/langchain_community/vectorstores/astradb.py +++ b/libs/community/langchain_community/vectorstores/astradb.py @@ -1,13 +1,12 @@ from __future__ import annotations -import asyncio import uuid import warnings -from asyncio import Task from concurrent.futures import ThreadPoolExecutor from typing import ( TYPE_CHECKING, Any, + Awaitable, Callable, Dict, Iterable, @@ -17,16 +16,21 @@ Tuple, Type, TypeVar, + Union, ) import numpy as np +from langchain_core._api.deprecation import deprecated from langchain_core.documents import Document from langchain_core.embeddings import Embeddings -from langchain_core.runnables import run_in_executor from langchain_core.runnables.utils import gather_with_concurrency from langchain_core.utils.iter import batch_iterate from langchain_core.vectorstores import VectorStore +from langchain_community.utilities.astradb import ( + SetupMode, + _AstraDBCollectionEnvironment, +) from langchain_community.vectorstores.utils import maximal_marginal_relevance if TYPE_CHECKING: @@ -61,6 +65,11 @@ def _unique_list(lst: List[T], key: Callable[[T], U]) -> List[T]: return new_lst +@deprecated( + since="0.1.23", + removal="0.2.0", + alternative_import="langchain_astradb.AstraDBVectorStore", +) class AstraDB(VectorStore): """Wrapper around DataStax Astra DB for vector-store workloads. @@ -161,28 +170,12 @@ def __init__( bulk_insert_batch_concurrency: Optional[int] = None, bulk_insert_overwrite_concurrency: Optional[int] = None, bulk_delete_concurrency: Optional[int] = None, + setup_mode: SetupMode = SetupMode.SYNC, pre_delete_collection: bool = False, ) -> None: """ Create an AstraDB vector store object. See class docstring for help. """ - try: - from astrapy.db import AstraDB as LibAstraDB - from astrapy.db import AstraDBCollection - except (ImportError, ModuleNotFoundError): - raise ImportError( - "Could not import a recent astrapy python package. " - "Please install it with `pip install --upgrade astrapy`." - ) - - # Conflicting-arg checks: - if astra_db_client is not None or async_astra_db_client is not None: - if token is not None or api_endpoint is not None: - raise ValueError( - "You cannot pass 'astra_db_client' or 'async_astra_db_client' to " - "AstraDB if passing 'token' and 'api_endpoint'." 
- ) - self.embedding = embedding self.collection_name = collection_name self.token = token @@ -201,105 +194,35 @@ def __init__( bulk_delete_concurrency or DEFAULT_BULK_DELETE_CONCURRENCY ) # "vector-related" settings - self._embedding_dimension: Optional[int] = None self.metric = metric + embedding_dimension: Union[int, Awaitable[int], None] = None + if setup_mode == SetupMode.ASYNC: + embedding_dimension = self._aget_embedding_dimension() + elif setup_mode == SetupMode.SYNC: + embedding_dimension = self._get_embedding_dimension() - self.astra_db = astra_db_client - self.async_astra_db = async_astra_db_client - self.collection = None - self.async_collection = None - - if token and api_endpoint: - self.astra_db = LibAstraDB( - token=self.token, - api_endpoint=self.api_endpoint, - namespace=self.namespace, - ) - try: - from astrapy.db import AsyncAstraDB - - self.async_astra_db = AsyncAstraDB( - token=self.token, - api_endpoint=self.api_endpoint, - namespace=self.namespace, - ) - except (ImportError, ModuleNotFoundError): - pass - - if self.astra_db is not None: - self.collection = AstraDBCollection( - collection_name=self.collection_name, - astra_db=self.astra_db, - ) - - self.async_setup_db_task: Optional[Task] = None - if self.async_astra_db is not None: - from astrapy.db import AsyncAstraDBCollection - - self.async_collection = AsyncAstraDBCollection( - collection_name=self.collection_name, - astra_db=self.async_astra_db, - ) - try: - self.async_setup_db_task = asyncio.create_task( - self._setup_db(pre_delete_collection) - ) - except RuntimeError: - pass - - if self.async_setup_db_task is None: - if not pre_delete_collection: - self._provision_collection() - else: - self.clear() - - def _ensure_astra_db_client(self): # type: ignore[no-untyped-def] - if not self.astra_db: - raise ValueError("Missing AstraDB client") - - async def _setup_db(self, pre_delete_collection: bool) -> None: - if pre_delete_collection: - await self.async_astra_db.delete_collection( # type: ignore[union-attr] - collection_name=self.collection_name, - ) - await self._aprovision_collection() - - async def _ensure_db_setup(self) -> None: - if self.async_setup_db_task: - await self.async_setup_db_task - - def _get_embedding_dimension(self) -> int: - if self._embedding_dimension is None: - self._embedding_dimension = len( - self.embedding.embed_query("This is a sample sentence.") - ) - return self._embedding_dimension - - def _provision_collection(self) -> None: - """ - Run the API invocation to create the collection on the backend. - - Internal-usage method, no object members are set, - other than working on the underlying actual storage. - """ - self.astra_db.create_collection( # type: ignore[union-attr] - dimension=self._get_embedding_dimension(), - collection_name=self.collection_name, - metric=self.metric, + self.astra_env = _AstraDBCollectionEnvironment( + collection_name=collection_name, + token=token, + api_endpoint=api_endpoint, + astra_db_client=astra_db_client, + async_astra_db_client=async_astra_db_client, + namespace=namespace, + setup_mode=setup_mode, + pre_delete_collection=pre_delete_collection, + embedding_dimension=embedding_dimension, + metric=metric, ) + self.astra_db = self.astra_env.astra_db + self.async_astra_db = self.astra_env.async_astra_db + self.collection = self.astra_env.collection + self.async_collection = self.astra_env.async_collection - async def _aprovision_collection(self) -> None: - """ - Run the API invocation to create the collection on the backend. 
+ def _get_embedding_dimension(self) -> int: + return len(self.embedding.embed_query(text="This is a sample sentence.")) - Internal-usage method, no object members are set, - other than working on the underlying actual storage. - """ - await self.async_astra_db.create_collection( # type: ignore[union-attr] - dimension=self._get_embedding_dimension(), - collection_name=self.collection_name, - metric=self.metric, - ) + async def _aget_embedding_dimension(self) -> int: + return len(await self.embedding.aembed_query(text="This is a sample sentence.")) @property def embeddings(self) -> Embeddings: @@ -320,14 +243,12 @@ def _select_relevance_score_fn(self) -> Callable[[float], float]: def clear(self) -> None: """Empty the collection of all its stored entries.""" - self.delete_collection() - self._provision_collection() + self.astra_env.ensure_db_setup() + self.collection.delete_many({}) async def aclear(self) -> None: """Empty the collection of all its stored entries.""" - await self._ensure_db_setup() - if not self.async_astra_db: - await run_in_executor(None, self.clear) + await self.astra_env.aensure_db_setup() await self.async_collection.delete_many({}) # type: ignore[union-attr] def delete_by_document_id(self, document_id: str) -> bool: @@ -335,7 +256,7 @@ def delete_by_document_id(self, document_id: str) -> bool: Remove a single document from the store, given its document_id (str). Return True if a document has indeed been deleted, False if ID not found. """ - self._ensure_astra_db_client() + self.astra_env.ensure_db_setup() deletion_response = self.collection.delete_one(document_id) # type: ignore[union-attr] return ((deletion_response or {}).get("status") or {}).get( "deletedCount", 0 @@ -346,9 +267,7 @@ async def adelete_by_document_id(self, document_id: str) -> bool: Remove a single document from the store, given its document_id (str). Return True if a document has indeed been deleted, False if ID not found. """ - await self._ensure_db_setup() - if not self.async_collection: - return await run_in_executor(None, self.delete_by_document_id, document_id) + await self.astra_env.aensure_db_setup() deletion_response = await self.async_collection.delete_one(document_id) return ((deletion_response or {}).get("status") or {}).get( "deletedCount", 0 @@ -433,8 +352,8 @@ def delete_collection(self) -> None: Stored data is lost and unrecoverable, resources are freed. Use with caution. """ - self._ensure_astra_db_client() - self.astra_db.delete_collection( # type: ignore[union-attr] + self.astra_env.ensure_db_setup() + self.astra_db.delete_collection( collection_name=self.collection_name, ) @@ -445,10 +364,8 @@ async def adelete_collection(self) -> None: Stored data is lost and unrecoverable, resources are freed. Use with caution. """ - await self._ensure_db_setup() - if not self.async_astra_db: - await run_in_executor(None, self.delete_collection) - await self.async_astra_db.delete_collection( # type: ignore[union-attr] + await self.astra_env.aensure_db_setup() + await self.async_astra_db.delete_collection( collection_name=self.collection_name, ) @@ -563,7 +480,7 @@ def add_texts( f"unsupported arguments ({', '.join(sorted(kwargs.keys()))}), " "which will be ignored." ) - self._ensure_astra_db_client() + self.astra_env.ensure_db_setup() embedding_vectors = self.embedding.embed_documents(list(texts)) documents_to_insert = self._get_documents_to_insert( @@ -649,22 +566,13 @@ async def aadd_texts( Returns: List[str]: List of ids of the added texts. 
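[Editor's note: with the refactor above, the community `AstraDB` store, now deprecated in favour of `langchain_astradb.AstraDBVectorStore`, delegates collection bootstrapping to `_AstraDBCollectionEnvironment`, and `setup_mode=SetupMode.ASYNC` defers it to the event loop. A hedged sketch of the async path; the endpoint, token and embedding model are placeholders, and any `Embeddings` implementation would do.]

```python
import asyncio

from langchain_community.utilities.astradb import SetupMode
from langchain_community.vectorstores.astradb import AstraDB
from langchain_openai import OpenAIEmbeddings  # assumes OPENAI_API_KEY is set


async def main() -> None:
    store = AstraDB(
        embedding=OpenAIEmbeddings(),
        collection_name="demo_collection",
        api_endpoint="https://<db-id>-<region>.apps.astra.datastax.com",  # placeholder
        token="AstraCS:...",                                              # placeholder
        setup_mode=SetupMode.ASYNC,  # collection creation happens on the event loop
    )
    ids = await store.aadd_texts(["hello astra"], metadatas=[{"topic": "demo"}])
    hits = await store.asimilarity_search("hello", k=1)
    print(ids, hits[0].page_content)


asyncio.run(main())
```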
""" - await self._ensure_db_setup() - if not self.async_collection: - await super().aadd_texts( - texts, - metadatas, - ids=ids, - batch_size=batch_size, - batch_concurrency=batch_concurrency, - overwrite_concurrency=overwrite_concurrency, - ) if kwargs: warnings.warn( "Method 'aadd_texts' of AstraDB vector store invoked with " f"unsupported arguments ({', '.join(sorted(kwargs.keys()))}), " "which will be ignored." ) + await self.astra_env.aensure_db_setup() embedding_vectors = await self.embedding.aembed_documents(list(texts)) documents_to_insert = self._get_documents_to_insert( @@ -725,7 +633,7 @@ def similarity_search_with_score_id_by_vector( Returns: List of (Document, score, id), the most similar to the query vector. """ - self._ensure_astra_db_client() + self.astra_env.ensure_db_setup() metadata_parameter = self._filter_to_metadata(filter) # hits = list( @@ -767,15 +675,7 @@ async def asimilarity_search_with_score_id_by_vector( Returns: List of (Document, score, id), the most similar to the query vector. """ - await self._ensure_db_setup() - if not self.async_collection: - return await run_in_executor( - None, - self.asimilarity_search_with_score_id_by_vector, # type: ignore[arg-type] - embedding, - k, - filter, - ) + await self.astra_env.aensure_db_setup() metadata_parameter = self._filter_to_metadata(filter) # return [ @@ -1004,7 +904,7 @@ def max_marginal_relevance_search_by_vector( Returns: List of Documents selected by maximal marginal relevance. """ - self._ensure_astra_db_client() + self.astra_env.ensure_db_setup() metadata_parameter = self._filter_to_metadata(filter) prefetch_hits = list( @@ -1045,18 +945,7 @@ async def amax_marginal_relevance_search_by_vector( Returns: List of Documents selected by maximal marginal relevance. """ - await self._ensure_db_setup() - if not self.async_collection: - return await run_in_executor( - None, - self.max_marginal_relevance_search_by_vector, - embedding, - k, - fetch_k, - lambda_mult, - filter, - **kwargs, - ) + await self.astra_env.aensure_db_setup() metadata_parameter = self._filter_to_metadata(filter) prefetch_hits = [ diff --git a/libs/community/langchain_community/vectorstores/azuresearch.py b/libs/community/langchain_community/vectorstores/azuresearch.py index 9006985fa2db2..44455199961f5 100644 --- a/libs/community/langchain_community/vectorstores/azuresearch.py +++ b/libs/community/langchain_community/vectorstores/azuresearch.py @@ -37,14 +37,10 @@ CorsOptions, ScoringProfile, SearchField, + SemanticConfiguration, VectorSearch, ) - try: - from azure.search.documents.indexes.models import SemanticSearch - except ImportError: - from azure.search.documents.indexes.models import SemanticSettings # <11.4.0 - # Allow overriding field names for Azure Search FIELDS_ID = get_from_env( key="AZURESEARCH_FIELDS_ID", env_key="AZURESEARCH_FIELDS_ID", default="id" @@ -73,7 +69,7 @@ def _get_search_client( semantic_configuration_name: Optional[str] = None, fields: Optional[List[SearchField]] = None, vector_search: Optional[VectorSearch] = None, - semantic_settings: Optional[Union[SemanticSearch, SemanticSettings]] = None, + semantic_configurations: Optional[SemanticConfiguration] = None, scoring_profiles: Optional[List[ScoringProfile]] = None, default_scoring_profile: Optional[str] = None, default_fields: Optional[List[SearchField]] = None, @@ -86,30 +82,21 @@ def _get_search_client( from azure.search.documents import SearchClient from azure.search.documents.indexes import SearchIndexClient from azure.search.documents.indexes.models import ( + 
ExhaustiveKnnAlgorithmConfiguration, + ExhaustiveKnnParameters, + HnswAlgorithmConfiguration, + HnswParameters, SearchIndex, SemanticConfiguration, SemanticField, + SemanticPrioritizedFields, + SemanticSearch, VectorSearch, + VectorSearchAlgorithmKind, + VectorSearchAlgorithmMetric, + VectorSearchProfile, ) - # class names changed for versions >= 11.4.0 - try: - from azure.search.documents.indexes.models import ( - HnswAlgorithmConfiguration, # HnswVectorSearchAlgorithmConfiguration is old - SemanticPrioritizedFields, # PrioritizedFields outdated - SemanticSearch, # SemanticSettings outdated - ) - - NEW_VERSION = True - except ImportError: - from azure.search.documents.indexes.models import ( - HnswVectorSearchAlgorithmConfiguration, - PrioritizedFields, - SemanticSettings, - ) - - NEW_VERSION = False - default_fields = default_fields or [] if key is None: credential = DefaultAzureCredential() @@ -155,77 +142,55 @@ def fmt_err(x: str) -> str: fields = default_fields # Vector search configuration if vector_search is None: - if NEW_VERSION: - # >= 11.4.0: - # VectorSearch(algorithm_configuration) --> VectorSearch(algorithms) - # HnswVectorSearchAlgorithmConfiguration --> HnswAlgorithmConfiguration - vector_search = VectorSearch( - algorithms=[ - HnswAlgorithmConfiguration( - name="default", - kind="hnsw", - parameters={ # type: ignore - "m": 4, - "efConstruction": 400, - "efSearch": 500, - "metric": "cosine", - }, - ) - ] - ) - else: # < 11.4.0 - vector_search = VectorSearch( - algorithm_configurations=[ - HnswVectorSearchAlgorithmConfiguration( - name="default", - kind="hnsw", - parameters={ # type: ignore - "m": 4, - "efConstruction": 400, - "efSearch": 500, - "metric": "cosine", - }, - ) - ] - ) + vector_search = VectorSearch( + algorithms=[ + HnswAlgorithmConfiguration( + name="default", + kind=VectorSearchAlgorithmKind.HNSW, + parameters=HnswParameters( + m=4, + ef_construction=400, + ef_search=500, + metric=VectorSearchAlgorithmMetric.COSINE, + ), + ), + ExhaustiveKnnAlgorithmConfiguration( + name="default_exhaustive_knn", + kind=VectorSearchAlgorithmKind.EXHAUSTIVE_KNN, + parameters=ExhaustiveKnnParameters( + metric=VectorSearchAlgorithmMetric.COSINE + ), + ), + ], + profiles=[ + VectorSearchProfile( + name="myHnswProfile", + algorithm_configuration_name="default", + ), + VectorSearchProfile( + name="myExhaustiveKnnProfile", + algorithm_configuration_name="default_exhaustive_knn", + ), + ], + ) # Create the semantic settings with the configuration - if semantic_settings is None and semantic_configuration_name is not None: - if NEW_VERSION: - # <=11.4.0: SemanticSettings --> SemanticSearch - # PrioritizedFields(prioritized_content_fields) - # --> SemanticPrioritizedFields(content_fields) - semantic_settings = SemanticSearch( - configurations=[ - SemanticConfiguration( - name=semantic_configuration_name, - prioritized_fields=SemanticPrioritizedFields( - content_fields=[ - SemanticField(field_name=FIELDS_CONTENT) - ], - ), - ) - ] - ) - else: # < 11.4.0 - semantic_settings = SemanticSettings( - configurations=[ - SemanticConfiguration( - name=semantic_configuration_name, - prioritized_fields=PrioritizedFields( - prioritized_content_fields=[ - SemanticField(field_name=FIELDS_CONTENT) - ], - ), - ) - ] - ) + semantic_search = None + if semantic_configurations is None and semantic_configuration_name is not None: + semantic_configuration = SemanticConfiguration( + name=semantic_configuration_name, + prioritized_fields=SemanticPrioritizedFields( + 
content_fields=[SemanticField(field_name=FIELDS_CONTENT)], + ), + ) + semantic_search = SemanticSearch(configurations=[semantic_configuration]) + # Create the search index with the semantic settings and vector search index = SearchIndex( name=index_name, fields=fields, vector_search=vector_search, - semantic_settings=semantic_settings, + semantic_search=semantic_search, scoring_profiles=scoring_profiles, default_scoring_profile=default_scoring_profile, cors_options=cors_options, @@ -251,10 +216,9 @@ def __init__( embedding_function: Union[Callable, Embeddings], search_type: str = "hybrid", semantic_configuration_name: Optional[str] = None, - semantic_query_language: str = "en-us", fields: Optional[List[SearchField]] = None, vector_search: Optional[VectorSearch] = None, - semantic_settings: Optional[Union[SemanticSearch, SemanticSettings]] = None, + semantic_configurations: Optional[SemanticConfiguration] = None, scoring_profiles: Optional[List[ScoringProfile]] = None, default_scoring_profile: Optional[str] = None, cors_options: Optional[CorsOptions] = None, @@ -292,7 +256,7 @@ def __init__( type=SearchFieldDataType.Collection(SearchFieldDataType.Single), searchable=True, vector_search_dimensions=len(self.embed_query("Text")), - vector_search_configuration="default", + vector_search_profile_name="myHnswProfile", ), SearchableField( name=FIELDS_METADATA, @@ -309,7 +273,7 @@ def __init__( semantic_configuration_name=semantic_configuration_name, fields=fields, vector_search=vector_search, - semantic_settings=semantic_settings, + semantic_configurations=semantic_configurations, scoring_profiles=scoring_profiles, default_scoring_profile=default_scoring_profile, default_fields=default_fields, @@ -318,7 +282,6 @@ def __init__( ) self.search_type = search_type self.semantic_configuration_name = semantic_configuration_name - self.semantic_query_language = semantic_query_language self.fields = fields if fields else default_fields @property @@ -451,39 +414,30 @@ def vector_search_with_score( Returns: List of Documents most similar to the query and score for each """ - from azure.search.documents.models import Vector + + from azure.search.documents.models import VectorizedQuery results = self.client.search( search_text="", - vectors=[ - Vector( - value=np.array(self.embed_query(query), dtype=np.float32).tolist(), - k=k, + vector_queries=[ + VectorizedQuery( + vector=np.array(self.embed_query(query), dtype=np.float32).tolist(), + k_nearest_neighbors=k, fields=FIELDS_CONTENT_VECTOR, ) ], filter=filters, + top=k, ) # Convert results to Document objects docs = [ ( Document( page_content=result.pop(FIELDS_CONTENT), - metadata={ - **( - {FIELDS_ID: result.pop(FIELDS_ID)} - if FIELDS_ID in result - else {} - ), - **( - json.loads(result[FIELDS_METADATA]) - if FIELDS_METADATA in result - else { - k: v - for k, v in result.items() - if k != FIELDS_CONTENT_VECTOR - } - ), + metadata=json.loads(result[FIELDS_METADATA]) + if FIELDS_METADATA in result + else { + k: v for k, v in result.items() if k != FIELDS_CONTENT_VECTOR }, ), float(result["@search.score"]), @@ -520,14 +474,14 @@ def hybrid_search_with_score( Returns: List of Documents most similar to the query and score for each """ - from azure.search.documents.models import Vector + from azure.search.documents.models import VectorizedQuery results = self.client.search( search_text=query, - vectors=[ - Vector( - value=np.array(self.embed_query(query), dtype=np.float32).tolist(), - k=k, + vector_queries=[ + VectorizedQuery( + 
vector=np.array(self.embed_query(query), dtype=np.float32).tolist(), + k_nearest_neighbors=k, fields=FIELDS_CONTENT_VECTOR, ) ], @@ -539,21 +493,10 @@ def hybrid_search_with_score( ( Document( page_content=result.pop(FIELDS_CONTENT), - metadata={ - **( - {FIELDS_ID: result.pop(FIELDS_ID)} - if FIELDS_ID in result - else {} - ), - **( - json.loads(result[FIELDS_METADATA]) - if FIELDS_METADATA in result - else { - k: v - for k, v in result.items() - if k != FIELDS_CONTENT_VECTOR - } - ), + metadata=json.loads(result[FIELDS_METADATA]) + if FIELDS_METADATA in result + else { + k: v for k, v in result.items() if k != FIELDS_CONTENT_VECTOR }, ), float(result["@search.score"]), @@ -610,20 +553,19 @@ def semantic_hybrid_search_with_score_and_rerank( Returns: List of Documents most similar to the query and score for each """ - from azure.search.documents.models import Vector + from azure.search.documents.models import VectorizedQuery results = self.client.search( search_text=query, - vectors=[ - Vector( - value=np.array(self.embed_query(query), dtype=np.float32).tolist(), - k=50, + vector_queries=[ + VectorizedQuery( + vector=np.array(self.embed_query(query), dtype=np.float32).tolist(), + k_nearest_neighbors=k, fields=FIELDS_CONTENT_VECTOR, ) ], filter=filters, query_type="semantic", - query_language=self.semantic_query_language, semantic_configuration_name=self.semantic_configuration_name, query_caption="extractive", query_answer="extractive", @@ -643,11 +585,6 @@ def semantic_hybrid_search_with_score_and_rerank( Document( page_content=result.pop(FIELDS_CONTENT), metadata={ - **( - {FIELDS_ID: result.pop(FIELDS_ID)} - if FIELDS_ID in result - else {} - ), **( json.loads(result[FIELDS_METADATA]) if FIELDS_METADATA in result @@ -667,9 +604,7 @@ def semantic_hybrid_search_with_score_and_rerank( if result.get("@search.captions") else {}, "answers": semantic_answers_dict.get( - json.loads(result[FIELDS_METADATA]).get("key") - if FIELDS_METADATA in result - else "", + json.loads(result["metadata"]).get("key"), "", ), }, diff --git a/libs/community/langchain_community/vectorstores/bigquery_vector_search.py b/libs/community/langchain_community/vectorstores/bigquery_vector_search.py index 28edc6536ce59..3c77ffb917ea4 100644 --- a/libs/community/langchain_community/vectorstores/bigquery_vector_search.py +++ b/libs/community/langchain_community/vectorstores/bigquery_vector_search.py @@ -16,6 +16,7 @@ from langchain_core.embeddings import Embeddings from langchain_core.vectorstores import VectorStore +from langchain_community.utils.google import get_client_info from langchain_community.vectorstores.utils import ( DistanceStrategy, maximal_marginal_relevance, @@ -30,7 +31,6 @@ _MIN_INDEX_ROWS = 5000 # minimal number of rows for creating an index _INDEX_CHECK_PERIOD_SECONDS = 60 # Do not check for index more often that this. 
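The Azure AI Search changes above drop the pre-11.4.0 compatibility branches and standardize on `VectorizedQuery`, `HnswAlgorithmConfiguration`, and `SemanticSearch`. A minimal sketch of the new query path, assuming azure-search-documents >= 11.4.0 with a placeholder endpoint, key, index name, and vector field:

```python
# Sketch of a pure vector query against azure-search-documents >= 11.4.0,
# mirroring the VectorizedQuery usage above; all identifiers are placeholders.
import numpy as np
from azure.core.credentials import AzureKeyCredential
from azure.search.documents import SearchClient
from azure.search.documents.models import VectorizedQuery

client = SearchClient(
    endpoint="https://<service>.search.windows.net",
    index_name="langchain-index",
    credential=AzureKeyCredential("<api-key>"),
)
query_vector = np.random.rand(1536).astype(np.float32).tolist()  # stand-in for embed_query(...)
results = client.search(
    search_text="",  # empty for pure vector search; pass the user query for hybrid search
    vector_queries=[
        VectorizedQuery(
            vector=query_vector,
            k_nearest_neighbors=4,
            fields="content_vector",  # the FIELDS_CONTENT_VECTOR default
        )
    ],
    top=4,
)
for result in results:
    print(result["content"], result["@search.score"])
```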
- _vector_table_lock = Lock() # process-wide BigQueryVectorSearch table lock @@ -90,8 +90,12 @@ def __init__( try: from google.cloud import bigquery + client_info = get_client_info(module="bigquery-vector-search") self.bq_client = bigquery.Client( - project=project_id, location=location, credentials=credentials + project=project_id, + location=location, + credentials=credentials, + client_info=client_info, ) except ModuleNotFoundError: raise ImportError( diff --git a/libs/community/langchain_community/vectorstores/databricks_vector_search.py b/libs/community/langchain_community/vectorstores/databricks_vector_search.py index 8505ea9cbc0bc..0a5e7a6d514c2 100644 --- a/libs/community/langchain_community/vectorstores/databricks_vector_search.py +++ b/libs/community/langchain_community/vectorstores/databricks_vector_search.py @@ -3,12 +3,15 @@ import json import logging import uuid -from typing import TYPE_CHECKING, Any, Iterable, List, Optional, Tuple, Type +from typing import TYPE_CHECKING, Any, Callable, Iterable, List, Optional, Tuple, Type +import numpy as np from langchain_core.documents import Document from langchain_core.embeddings import Embeddings from langchain_core.vectorstores import VST, VectorStore +from langchain_community.vectorstores.utils import maximal_marginal_relevance + if TYPE_CHECKING: from databricks.vector_search.client import VectorSearchIndex @@ -321,6 +324,126 @@ def similarity_search_with_score( ) return self._parse_search_response(search_resp) + @staticmethod + def _identity_fn(score: float) -> float: + return score + + def _select_relevance_score_fn(self) -> Callable[[float], float]: + """ + Databricks Vector search uses a normalized score 1/(1+d) where d + is the L2 distance. Hence, we simply return the identity function. + """ + + return self._identity_fn + + def max_marginal_relevance_search( + self, + query: str, + k: int = 4, + fetch_k: int = 20, + lambda_mult: float = 0.5, + filters: Optional[Any] = None, + **kwargs: Any, + ) -> List[Document]: + """Return docs selected using the maximal marginal relevance. + + Maximal marginal relevance optimizes for similarity to query AND diversity + among selected documents. + + Args: + query: Text to look up documents similar to. + k: Number of Documents to return. Defaults to 4. + fetch_k: Number of Documents to fetch to pass to MMR algorithm. + lambda_mult: Number between 0 and 1 that determines the degree + of diversity among the results with 0 corresponding + to maximum diversity and 1 to minimum diversity. + Defaults to 0.5. + filters: Filters to apply to the query. Defaults to None. + Returns: + List of Documents selected by maximal marginal relevance. + """ + if not self._is_databricks_managed_embeddings(): + assert self.embeddings is not None, "embedding model is required." + query_vector = self.embeddings.embed_query(query) + else: + raise ValueError( + "`max_marginal_relevance_search` is not supported for index with " + "Databricks-managed embeddings." + ) + + docs = self.max_marginal_relevance_search_by_vector( + query_vector, + k, + fetch_k, + lambda_mult=lambda_mult, + filters=filters, + ) + return docs + + def max_marginal_relevance_search_by_vector( + self, + embedding: List[float], + k: int = 4, + fetch_k: int = 20, + lambda_mult: float = 0.5, + filters: Optional[Any] = None, + **kwargs: Any, + ) -> List[Document]: + """Return docs selected using the maximal marginal relevance. + + Maximal marginal relevance optimizes for similarity to query AND diversity + among selected documents. 
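The new Databricks methods above delegate re-ranking to `maximal_marginal_relevance`, which balances similarity to the query against diversity among the already selected candidates. A standalone illustration with toy vectors (the helper is the one imported at the top of the module; the vectors themselves are placeholders):

```python
# Toy illustration of the MMR re-ranking step used by the new Databricks
# max_marginal_relevance_search* methods above.
import numpy as np
from langchain_community.vectorstores.utils import maximal_marginal_relevance

query_vec = np.array([1.0, 0.0], dtype=np.float32)
candidate_vecs = [
    [0.9, 0.1],    # close to the query
    [0.89, 0.11],  # nearly a duplicate of the first candidate
    [0.1, 0.9],    # less similar, but adds diversity
]
# lambda_mult trades relevance (1.0) against diversity (0.0); 0.5 is the default.
selected = maximal_marginal_relevance(query_vec, candidate_vecs, k=2, lambda_mult=0.5)
print(selected)  # indices of the chosen candidates, typically [0, 2]
```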
+ + Args: + embedding: Embedding to look up documents similar to. + k: Number of Documents to return. Defaults to 4. + fetch_k: Number of Documents to fetch to pass to MMR algorithm. + lambda_mult: Number between 0 and 1 that determines the degree + of diversity among the results with 0 corresponding + to maximum diversity and 1 to minimum diversity. + Defaults to 0.5. + filters: Filters to apply to the query. Defaults to None. + Returns: + List of Documents selected by maximal marginal relevance. + """ + if not self._is_databricks_managed_embeddings(): + embedding_column = self._embedding_vector_column_name() + else: + raise ValueError( + "`max_marginal_relevance_search` is not supported for index with " + "Databricks-managed embeddings." + ) + + search_resp = self.index.similarity_search( + columns=list(set(self.columns + [embedding_column])), + query_text=None, + query_vector=embedding, + filters=filters, + num_results=fetch_k, + ) + + embeddings_result_index = ( + search_resp.get("manifest").get("columns").index({"name": embedding_column}) + ) + embeddings = [ + doc[embeddings_result_index] + for doc in search_resp.get("result").get("data_array") + ] + + mmr_selected = maximal_marginal_relevance( + np.array(embedding, dtype=np.float32), + embeddings, + k=k, + lambda_mult=lambda_mult, + ) + + ignore_cols: List = ( + [embedding_column] if embedding_column not in self.columns else [] + ) + candidates = self._parse_search_response(search_resp, ignore_cols=ignore_cols) + selected_results = [r[0] for i, r in enumerate(candidates) if i in mmr_selected] + return selected_results + def similarity_search_by_vector( self, embedding: List[float], @@ -373,8 +496,13 @@ def similarity_search_by_vector_with_score( ) return self._parse_search_response(search_resp) - def _parse_search_response(self, search_resp: dict) -> List[Tuple[Document, float]]: + def _parse_search_response( + self, search_resp: dict, ignore_cols: Optional[List[str]] = None + ) -> List[Tuple[Document, float]]: """Parse the search response into a list of Documents with score.""" + if ignore_cols is None: + ignore_cols = [] + columns = [ col["name"] for col in search_resp.get("manifest", dict()).get("columns", []) @@ -386,7 +514,7 @@ def _parse_search_response(self, search_resp: dict) -> List[Tuple[Document, floa metadata = { col: value for col, value in zip(columns[:-1], result[:-1]) - if col not in [self.primary_key, self.text_column] + if col not in ([self.primary_key, self.text_column] + ignore_cols) } metadata[self.primary_key] = doc_id score = result[-1] diff --git a/libs/community/langchain_community/vectorstores/elasticsearch.py b/libs/community/langchain_community/vectorstores/elasticsearch.py index 7b6eef40feaaa..2f6aa4df69769 100644 --- a/libs/community/langchain_community/vectorstores/elasticsearch.py +++ b/libs/community/langchain_community/vectorstores/elasticsearch.py @@ -528,9 +528,9 @@ def __init__( self.strategy = strategy if es_connection is not None: - self.client = es_connection.options( - headers={"user-agent": self.get_user_agent()} - ) + headers = dict(es_connection._headers) + headers.update({"user-agent": self.get_user_agent()}) + self.client = es_connection.options(headers=headers) elif es_url is not None or es_cloud_id is not None: self.client = ElasticsearchStore.connect_to_elasticsearch( es_url=es_url, diff --git a/libs/community/langchain_community/vectorstores/faiss.py b/libs/community/langchain_community/vectorstores/faiss.py index 06f28d385c9d2..4a7d65f9120a9 100644 --- 
a/libs/community/langchain_community/vectorstores/faiss.py +++ b/libs/community/langchain_community/vectorstores/faiss.py @@ -333,8 +333,9 @@ def filter_func(metadata): # type: ignore[no-untyped-def] doc = self.docstore.search(_id) if not isinstance(doc, Document): raise ValueError(f"Could not find document for id {_id}, got {doc}") - if filter is not None and filter_func(doc.metadata): - docs.append((doc, scores[0][j])) + if filter is not None: + if filter_func(doc.metadata): + docs.append((doc, scores[0][j])) else: docs.append((doc, scores[0][j])) @@ -920,11 +921,13 @@ def __from( else: # Default to L2, currently other metric types not initialized. index = faiss.IndexFlatL2(len(embeddings[0])) + docstore = kwargs.pop("docstore", InMemoryDocstore()) + index_to_docstore_id = kwargs.pop("index_to_docstore_id", {}) vecstore = cls( embedding, index, - InMemoryDocstore(), - {}, + docstore, + index_to_docstore_id, normalize_L2=normalize_L2, distance_strategy=distance_strategy, **kwargs, diff --git a/libs/community/langchain_community/vectorstores/lancedb.py b/libs/community/langchain_community/vectorstores/lancedb.py index 4ca68c92ca66b..414517793ee44 100644 --- a/libs/community/langchain_community/vectorstores/lancedb.py +++ b/libs/community/langchain_community/vectorstores/lancedb.py @@ -12,6 +12,18 @@ class LanceDB(VectorStore): """`LanceDB` vector store. To use, you should have ``lancedb`` python package installed. + You can install it with ``pip install lancedb``. + + Args: + connection: LanceDB connection to use. If not provided, a new connection + will be created. + embedding: Embedding to use for the vectorstore. + vector_key: Key to use for the vector in the database. Defaults to ``vector``. + id_key: Key to use for the id in the database. Defaults to ``id``. + text_key: Key to use for the text in the database. Defaults to ``text``. + table_name: Name of the table to use. Defaults to ``vectorstore``. + + Example: .. code-block:: python @@ -25,13 +37,14 @@ class LanceDB(VectorStore): def __init__( self, - connection: Any, - embedding: Embeddings, + connection: Optional[Any] = None, + embedding: Optional[Embeddings] = None, vector_key: Optional[str] = "vector", id_key: Optional[str] = "id", text_key: Optional[str] = "text", + table_name: Optional[str] = "vectorstore", ): - """Initialize with Lance DB connection""" + """Initialize with Lance DB vectorstore""" try: import lancedb except ImportError: @@ -39,19 +52,28 @@ def __init__( "Could not import lancedb python package. " "Please install it with `pip install lancedb`." 
) - if not isinstance(connection, lancedb.db.LanceTable): - raise ValueError( - "connection should be an instance of lancedb.db.LanceTable, ", - f"got {type(connection)}", - ) - self._connection = connection + self.lancedb = lancedb self._embedding = embedding self._vector_key = vector_key self._id_key = id_key self._text_key = text_key + self._table_name = table_name + + if self._embedding is None: + raise ValueError("embedding should be provided") + + if connection is not None: + if not isinstance(connection, lancedb.db.LanceTable): + raise ValueError( + "connection should be an instance of lancedb.db.LanceTable, ", + f"got {type(connection)}", + ) + self._connection = connection + else: + self._connection = self._init_table() @property - def embeddings(self) -> Embeddings: + def embeddings(self) -> Optional[Embeddings]: return self._embedding def add_texts( @@ -74,7 +96,7 @@ def add_texts( # Embed texts and create documents docs = [] ids = ids or [str(uuid.uuid4()) for _ in texts] - embeddings = self._embedding.embed_documents(list(texts)) + embeddings = self._embedding.embed_documents(list(texts)) # type: ignore for idx, text in enumerate(texts): embedding = embeddings[idx] metadata = metadatas[idx] if metadatas else {} @@ -86,7 +108,6 @@ def add_texts( **metadata, } ) - self._connection.add(docs) return ids @@ -102,14 +123,23 @@ def similarity_search( Returns: List of documents most similar to the query. """ - embedding = self._embedding.embed_query(query) - docs = self._connection.search(embedding).limit(k).to_df() + embedding = self._embedding.embed_query(query) # type: ignore + docs = ( + self._connection.search(embedding, vector_column_name=self._vector_key) + .limit(k) + .to_arrow() + ) + columns = docs.schema.names return [ Document( - page_content=row[self._text_key], - metadata=row[docs.columns != self._text_key], + page_content=docs[self._text_key][idx].as_py(), + metadata={ + col: docs[col][idx].as_py() + for col in columns + if col != self._text_key + }, ) - for _, row in docs.iterrows() + for idx in range(len(docs)) ] @classmethod @@ -134,3 +164,23 @@ def from_texts( instance.add_texts(texts, metadatas=metadatas, **kwargs) return instance + + def _init_table(self) -> Any: + import pyarrow as pa + + schema = pa.schema( + [ + pa.field( + self._vector_key, + pa.list_( + pa.float32(), + len(self.embeddings.embed_query("test")), # type: ignore + ), + ), + pa.field(self._id_key, pa.string()), + pa.field(self._text_key, pa.string()), + ] + ) + db = self.lancedb.connect("/tmp/lancedb") + tbl = db.create_table(self._table_name, schema=schema, mode="overwrite") + return tbl diff --git a/libs/community/langchain_community/vectorstores/rocksetdb.py b/libs/community/langchain_community/vectorstores/rocksetdb.py index 992f53db081dd..ffb9f7f7f2006 100644 --- a/libs/community/langchain_community/vectorstores/rocksetdb.py +++ b/libs/community/langchain_community/vectorstores/rocksetdb.py @@ -1,11 +1,13 @@ from __future__ import annotations import logging +from copy import deepcopy from enum import Enum from typing import Any, Iterable, List, Optional, Tuple from langchain_core.documents import Document from langchain_core.embeddings import Embeddings +from langchain_core.runnables import run_in_executor from langchain_core.vectorstores import VectorStore logger = logging.getLogger(__name__) @@ -122,7 +124,7 @@ def add_texts( batch = [] doc = {} if metadatas and len(metadatas) > i: - doc = metadatas[i] + doc = deepcopy(metadatas[i]) if ids and len(ids) > i: doc["_id"] = ids[i] 
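With the LanceDB changes above, passing a `connection` is now optional: when it is omitted, the store builds its own table (named by `table_name`) in a local `/tmp/lancedb` database sized to the embedding dimension. A hedged usage sketch, assuming `lancedb` and `pyarrow` are installed and using a fake embedding model as a stand-in:

```python
# Hedged sketch of the new connection-less LanceDB path shown above; the store
# creates its backing table under /tmp/lancedb when no connection is supplied.
from langchain_community.embeddings import FakeEmbeddings
from langchain_community.vectorstores import LanceDB

store = LanceDB(embedding=FakeEmbeddings(size=32), table_name="vectorstore")
store.add_texts(["lancedb now self-initializes", "its backing table"])
print(store.similarity_search("table", k=1))
```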
doc[self._text_key] = text @@ -332,3 +334,19 @@ def delete_texts(self, ids: List[str]) -> None: data=[DeleteDocumentsRequestData(id=i) for i in ids], workspace=self._workspace, ) + + def delete(self, ids: Optional[List[str]] = None, **kwargs: Any) -> Optional[bool]: + try: + if ids is None: + ids = [] + self.delete_texts(ids) + except Exception as e: + logger.error("Exception when deleting docs from Rockset: %s\n", e) + return False + + return True + + async def adelete( + self, ids: Optional[List[str]] = None, **kwargs: Any + ) -> Optional[bool]: + return await run_in_executor(None, self.delete, ids, **kwargs) diff --git a/libs/community/langchain_community/vectorstores/singlestoredb.py b/libs/community/langchain_community/vectorstores/singlestoredb.py index c33858e965705..6d43b6199b8f7 100644 --- a/libs/community/langchain_community/vectorstores/singlestoredb.py +++ b/libs/community/langchain_community/vectorstores/singlestoredb.py @@ -57,6 +57,10 @@ def __init__( content_field: str = "content", metadata_field: str = "metadata", vector_field: str = "vector", + use_vector_index: bool = False, + vector_index_name: str = "", + vector_index_options: Optional[dict] = None, + vector_size: int = 1536, pool_size: int = 5, max_overflow: int = 10, timeout: float = 30, @@ -88,6 +92,27 @@ def __init__( vector_field (str, optional): Specifies the field to store the vector. Defaults to "vector". + use_vector_index (bool, optional): Toggles the use of a vector index. + Works only with SingleStoreDB 8.5 or later. Defaults to False. + If set to True, vector_size parameter is required to be set to + a proper value. + + vector_index_name (str, optional): Specifies the name of the vector index. + Defaults to empty. Will be ignored if use_vector_index is set to False. + + vector_index_options (dict, optional): Specifies the options for + the vector index. Defaults to {}. + Will be ignored if use_vector_index is set to False. The options are: + index_type (str, optional): Specifies the type of the index. + Defaults to IVF_PQFS. + For more options, please refer to the SingleStoreDB documentation: + https://docs.singlestore.com/cloud/reference/sql-reference/vector-functions/vector-indexing/ + + vector_size (int, optional): Specifies the size of the vector. + Defaults to 1536. Required if use_vector_index is set to True. + Should be set to the same value as the size of the vectors + stored in the vector_field. + Following arguments pertain to the connection pool: pool_size (int, optional): Determines the number of active connections in @@ -177,6 +202,19 @@ def __init__( os.environ['SINGLESTOREDB_URL'] = 'me:p455w0rd@s2-host.com/my_db' vectorstore = SingleStoreDB(OpenAIEmbeddings()) + + Using vector index: + + .. code-block:: python + + from langchain_community.embeddings import OpenAIEmbeddings + from langchain_community.vectorstores import SingleStoreDB + + os.environ['SINGLESTOREDB_URL'] = 'me:p455w0rd@s2-host.com/my_db' + vectorstore = SingleStoreDB( + OpenAIEmbeddings(), + use_vector_index=True, + ) """ self.embedding = embedding @@ -186,6 +224,12 @@ def __init__( self.metadata_field = self._sanitize_input(metadata_field) self.vector_field = self._sanitize_input(vector_field) + self.use_vector_index = bool(use_vector_index) + self.vector_index_name = self._sanitize_input(vector_index_name) + self.vector_index_options = dict(vector_index_options or {}) + self.vector_index_options["metric_type"] = self.distance_strategy + self.vector_size = int(vector_size) + # Pass the rest of the kwargs to the connection. 
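The new SingleStoreDB options above switch the table definition to a `VECTOR(<size>, F32)` column with an attached `VECTOR INDEX` when `use_vector_index=True` (SingleStoreDB 8.5 or later). A hedged construction sketch, reusing the placeholder connection URL from the docstring above and a fake embedding model:

```python
# Hedged sketch of enabling the ANN vector index described above; the URL and
# index options are placeholders, and vector_size must match the embeddings.
import os

from langchain_community.embeddings import FakeEmbeddings
from langchain_community.vectorstores import SingleStoreDB

os.environ["SINGLESTOREDB_URL"] = "me:p455w0rd@s2-host.com/my_db"  # placeholder

store = SingleStoreDB(
    FakeEmbeddings(size=1536),
    use_vector_index=True,  # requires SingleStoreDB 8.5+
    vector_index_name="demo_vec_idx",
    vector_index_options={"index_type": "IVF_PQFS"},  # default index type per the docstring
    vector_size=1536,  # must equal the embedding dimension
)
store.add_texts(["stored behind an IVF_PQFS vector index"])
```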
self.connection_kwargs = kwargs @@ -194,7 +238,7 @@ def __init__( self.connection_kwargs["conn_attrs"] = dict() self.connection_kwargs["conn_attrs"]["_connector_name"] = "langchain python sdk" - self.connection_kwargs["conn_attrs"]["_connector_version"] = "1.0.1" + self.connection_kwargs["conn_attrs"]["_connector_version"] = "1.0.2" # Create connection pool. self.connection_pool = QueuePool( @@ -222,16 +266,38 @@ def _create_table(self: SingleStoreDB) -> None: try: cur = conn.cursor() try: - cur.execute( - """CREATE TABLE IF NOT EXISTS {} - ({} TEXT CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci, - {} BLOB, {} JSON);""".format( - self.table_name, - self.content_field, - self.vector_field, - self.metadata_field, - ), - ) + if self.use_vector_index: + index_options = "" + if self.vector_index_options and len(self.vector_index_options) > 0: + index_options = "INDEX_OPTIONS '{}'".format( + json.dumps(self.vector_index_options) + ) + cur.execute( + """CREATE TABLE IF NOT EXISTS {} + ({} TEXT CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci, + {} VECTOR({}, F32) NOT NULL, {} JSON, + VECTOR INDEX {} ({}) {});""".format( + self.table_name, + self.content_field, + self.vector_field, + self.vector_size, + self.metadata_field, + self.vector_index_name, + self.vector_field, + index_options, + ), + ) + else: + cur.execute( + """CREATE TABLE IF NOT EXISTS {} + ({} TEXT CHARACTER SET utf8mb4 COLLATE utf8mb4_general_ci, + {} BLOB, {} JSON);""".format( + self.table_name, + self.content_field, + self.vector_field, + self.metadata_field, + ), + ) finally: cur.close() finally: @@ -279,6 +345,8 @@ def add_texts( json.dumps(metadata), ), ) + if self.use_vector_index: + cur.execute("OPTIMIZE TABLE {} FLUSH;".format(self.table_name)) finally: cur.close() finally: @@ -406,6 +474,10 @@ def from_texts( content_field: str = "content", metadata_field: str = "metadata", vector_field: str = "vector", + use_vector_index: bool = False, + vector_index_name: str = "", + vector_index_options: Optional[dict] = None, + vector_size: int = 1536, pool_size: int = 5, max_overflow: int = 10, timeout: float = 30, @@ -438,6 +510,10 @@ def from_texts( pool_size=pool_size, max_overflow=max_overflow, timeout=timeout, + use_vector_index=use_vector_index, + vector_index_name=vector_index_name, + vector_index_options=vector_index_options, + vector_size=vector_size, **kwargs, ) instance.add_texts(texts, metadatas, embedding.embed_documents(texts), **kwargs) diff --git a/libs/community/langchain_community/vectorstores/surrealdb.py b/libs/community/langchain_community/vectorstores/surrealdb.py index ef65c5ec6b002..34f002305e1fd 100644 --- a/libs/community/langchain_community/vectorstores/surrealdb.py +++ b/libs/community/langchain_community/vectorstores/surrealdb.py @@ -116,6 +116,8 @@ async def aadd_texts( data = {"text": text, "embedding": embeddings[idx]} if metadatas is not None and idx < len(metadatas): data["metadata"] = metadatas[idx] # type: ignore[assignment] + else: + data["metadata"] = [] record = await self.sdb.create( self.collection, data, diff --git a/libs/community/poetry.lock b/libs/community/poetry.lock index c5ea8eae37ae4..be5c69eebb2ad 100644 --- a/libs/community/poetry.lock +++ b/libs/community/poetry.lock @@ -3650,7 +3650,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.1.21" +version = "0.1.24" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -3660,7 +3660,7 @@ develop = true [package.dependencies] anyio = ">=3,<5" jsonpatch = 
"^1.33" -langsmith = "^0.0.87" +langsmith = "^0.1.0" packaging = "^23.2" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -3676,13 +3676,13 @@ url = "../core" [[package]] name = "langsmith" -version = "0.0.87" +version = "0.1.1" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, - {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, + {file = "langsmith-0.1.1-py3-none-any.whl", hash = "sha256:10ff2b977a41e3f6351d1a4239d9bd57af0547aa909e839d2791e16cc197a6f9"}, + {file = "langsmith-0.1.1.tar.gz", hash = "sha256:09df0c2ca9085105f97a4e4f281b083e312c99d162f3fe2b2d5eefd5c3692e60"}, ] [package.dependencies] @@ -8123,6 +8123,165 @@ files = [ docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<7.5)", "pytest-mock", "pytest-mypy-testing"] +[[package]] +name = "tree-sitter" +version = "0.20.4" +description = "Python bindings for the Tree-Sitter parsing library" +optional = true +python-versions = ">=3.3" +files = [ + {file = "tree_sitter-0.20.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c259b9bcb596e54f54713eb3951226fc834d65289940f4bfdcdf519f08e8e876"}, + {file = "tree_sitter-0.20.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:88da7e2e4c69881cd63916cc24ae0b809f96aae331da45b418ae6b2d1ed2ca19"}, + {file = "tree_sitter-0.20.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66a68b156ba131e9d8dff4a1f72037f4b368cc50c58f18905a91743ae1d1c795"}, + {file = "tree_sitter-0.20.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae28e25d551f406807011487bdfb9728041e656b30b554fa7f3391ab64ed69f9"}, + {file = "tree_sitter-0.20.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36b10c9c69e825ba65cf9b0f77668bf33e70d2a5764b64ad6f133f8cc9220f09"}, + {file = "tree_sitter-0.20.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7c18c64ddd44b75b7e1660b9793753eda427e4b145b6216d4b2d2e9b200c74f2"}, + {file = "tree_sitter-0.20.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e9e9e594bbefb76ad9ea256f5c87eba7591b4758854d3df83ce4df415933a006"}, + {file = "tree_sitter-0.20.4-cp310-cp310-win32.whl", hash = "sha256:b4755229dc18644fe48bcab974bde09b171fcb6ef625d3cb5ece5c6198f4223e"}, + {file = "tree_sitter-0.20.4-cp310-cp310-win_amd64.whl", hash = "sha256:f792684cee8a46d9194d9f4223810e54ccc704470c5777538d59fbde0a4c91bf"}, + {file = "tree_sitter-0.20.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9d22ee75f45836554ee6a11e50dd8f9827941e67c49fce9a0790245b899811a9"}, + {file = "tree_sitter-0.20.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a0ffd76dd991ba745bb5d0ba1d583bec85726d3ddef8c9685dc8636a619adde"}, + {file = "tree_sitter-0.20.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:060d4e5b803be0975f1ac46e54a292eab0701296ccd912f6cdac3f7331e29143"}, + {file = "tree_sitter-0.20.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:822e02366dbf223697b2b56b8f91aa5b60571f9fe7c998988a381db1c69604e9"}, + {file = "tree_sitter-0.20.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:527ca72c6a8f60fa719af37fa86f58b7ad0e07b8f74d1c1c7e926c5c888a7e6b"}, + {file = "tree_sitter-0.20.4-cp311-cp311-musllinux_1_1_aarch64.whl", 
hash = "sha256:a418ca71309ea7052e076f08d623f33f58eae01a8e8cdc1e6d3a01b5b8ddebfe"}, + {file = "tree_sitter-0.20.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:08c3ba2561b61a83c28ca06a0bce2a5ffcfb6b39f9d27a45e5ebd9cad2bedb7f"}, + {file = "tree_sitter-0.20.4-cp311-cp311-win32.whl", hash = "sha256:8d04c75a389b2de94952d602264852acff8cd3ed1ccf8a2492a080973d5ddd58"}, + {file = "tree_sitter-0.20.4-cp311-cp311-win_amd64.whl", hash = "sha256:ba9215c0e7529d9eb370528e5d99b7389d14a7eae94f07d14fa9dab18f267c62"}, + {file = "tree_sitter-0.20.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c4c1af5ed4306071d30970c83ec882520a7bf5d8053996dbc4aa5c59238d4990"}, + {file = "tree_sitter-0.20.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9d70bfa550cf22c9cea9b3c0d18b889fc4f2a7e9dcf1d6cc93f49fa9d4a94954"}, + {file = "tree_sitter-0.20.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6de537bca0641775d8d175d37303d54998980fc0d997dd9aa89e16b415bf0cc3"}, + {file = "tree_sitter-0.20.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b1c0f8c0e3e50267566f5116cdceedf4e23e8c08b55ef3becbe954a11b16e84"}, + {file = "tree_sitter-0.20.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef2ee6d9bb8e21713949e5ff769ed670fe1217f95b7eeb6c675788438c1e6e"}, + {file = "tree_sitter-0.20.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b6fd1c881ab0de5faa67168db2d001eee32be5482cb4e0b21b217689a05b6fe4"}, + {file = "tree_sitter-0.20.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bf47047420021d50aec529cb66387c90562350b499ddf56ecef1fc8255439e30"}, + {file = "tree_sitter-0.20.4-cp312-cp312-win32.whl", hash = "sha256:c16b48378041fc9702b6aa3480f2ffa49ca8ea58141a862acd569e5a0679655f"}, + {file = "tree_sitter-0.20.4-cp312-cp312-win_amd64.whl", hash = "sha256:973e871167079a1b1d7304d361449253efbe2a6974728ad563cf407bd02ddccb"}, + {file = "tree_sitter-0.20.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9d33a55598dd18a4d8b869a3417de82a4812c3a7dc7e61cb025ece3e9c3e4e96"}, + {file = "tree_sitter-0.20.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cee6955c2c97fc5927a41c7a8b06647c4b4d9b99b8a1581bf1183435c8cec3e"}, + {file = "tree_sitter-0.20.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5022bea67e479ad212be7c05b983a72e297a013efb4e8ea5b5b4d7da79a9fdef"}, + {file = "tree_sitter-0.20.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:640f60a5b966f0990338f1bf559455c3dcb822bc4329d82b3d42f32a48374dfe"}, + {file = "tree_sitter-0.20.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0e83f641fe6f27d91bd4d259fff5d35de1567d3f581b9efe9bbd5be50fe4ddc7"}, + {file = "tree_sitter-0.20.4-cp36-cp36m-win32.whl", hash = "sha256:ce6a85027c66fa3f09d482cc6d41927ea40955f7f33b86aedd26dd932709a2c9"}, + {file = "tree_sitter-0.20.4-cp36-cp36m-win_amd64.whl", hash = "sha256:fe10779347a6c067af29cb37fd4b75fa96c5cb68f587cc9530b70fe3f2a51a55"}, + {file = "tree_sitter-0.20.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28d5f84e34e276887e3a240b60906ca7e2b51e975f3145c3149ceed977a69508"}, + {file = "tree_sitter-0.20.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c913b65cbe10996116988ac436748f24883b5097e58274223e89bb2c5d1bb1a"}, + {file = "tree_sitter-0.20.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecaed46241e071752195a628bb97d2b740f2fde9e34f8a74456a4ea8bb26df88"}, + {file = "tree_sitter-0.20.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:b641e88a97eab002a1736d93ef5a4beac90ea4fd6e25affd1831319b99f456c9"}, + {file = "tree_sitter-0.20.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:327c40f439c6155e4eee54c4657e4701a04f5f4816d9defdcb836bf65bf83d21"}, + {file = "tree_sitter-0.20.4-cp37-cp37m-win32.whl", hash = "sha256:1b7c1d95f006b3de42fbf4045bd00c273d113e372fcb6a5378e74ed120c12032"}, + {file = "tree_sitter-0.20.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6140d037239a41046f5d34fba5e0374ee697adb4b48b90579c618b5402781c11"}, + {file = "tree_sitter-0.20.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f42fd1104efaad8151370f1936e2a488b7337a5d24544a9ab59ba4c4010b1272"}, + {file = "tree_sitter-0.20.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7859717c5d62ee386b3d036cab8ed0f88f8c027b6b4ae476a55a8c5fb8aab713"}, + {file = "tree_sitter-0.20.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fdd361fe1cc68db68b4d85165641275e34b86cc26b2bab932790204fa14824dc"}, + {file = "tree_sitter-0.20.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b8d7539075606027b67764543463ff2bc4e52f4158ef6dc419c9f5625aa5383"}, + {file = "tree_sitter-0.20.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78e76307f05aca6cde72f3307b4d53701f34ae45f2248ceb83d1626051e201fd"}, + {file = "tree_sitter-0.20.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dd8c352f4577f61098d06cf3feb7fd214259f41b5036b81003860ed54d16b448"}, + {file = "tree_sitter-0.20.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:281f3e5382d1bd7fccc88d1afe68c915565bc24f8b8dd4844079d46c7815b8a7"}, + {file = "tree_sitter-0.20.4-cp38-cp38-win32.whl", hash = "sha256:6a77ac3cdcddd80cdd1fd394318bff99f94f37e08d235aaefccb87e1224946e5"}, + {file = "tree_sitter-0.20.4-cp38-cp38-win_amd64.whl", hash = "sha256:8eee8adf54033dc48eab84b040f4d7b32355a964c4ae0aae5dfbdc4dbc3364ca"}, + {file = "tree_sitter-0.20.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e89f6508e30fce05e2c724725d022db30d877817b9d64f933506ffb3a3f4a2c2"}, + {file = "tree_sitter-0.20.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7fb6286bb1fae663c45ff0700ec88fb9b50a81eed2bae8a291f95fcf8cc19547"}, + {file = "tree_sitter-0.20.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11e93f8b4bbae04070416a82257a7ab2eb0afb76e093ae3ea73bd63b792f6846"}, + {file = "tree_sitter-0.20.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8250725c5f78929aeb2c71db5dca76f1ef448389ca16f9439161f90978bb8478"}, + {file = "tree_sitter-0.20.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d404a8ca9de9b0843844f0cd4d423f46bc46375ab8afb63b1d8ec01201457ac8"}, + {file = "tree_sitter-0.20.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0f2422c9ee70ba972dfc3943746e6cf7fc03725a866908950245bda9ccfc7301"}, + {file = "tree_sitter-0.20.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:21a937942e4729abbe778a609d2c218574436cb351c36fba89ef3c8c6066ec78"}, + {file = "tree_sitter-0.20.4-cp39-cp39-win32.whl", hash = "sha256:427a9a39360cc1816e28f8182550e478e4ba983595a2565ab9dfe32ea1b03fd7"}, + {file = "tree_sitter-0.20.4-cp39-cp39-win_amd64.whl", hash = "sha256:7095bb9aff297fa9c6026bf8914fd295997d714d1a6ee9a1edf7282c772f9f64"}, + {file = "tree_sitter-0.20.4-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:859260b90f0e3867ae840e39f54e830f607b3bc531bc21deeeeaa8a30cbb89ad"}, + {file = "tree_sitter-0.20.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0dfc14be73cf46126660a3aecdd0396e69562ad1a902245225ca7bd29649594e"}, + {file = "tree_sitter-0.20.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec46355bf3ff23f54d5e365871ffd3e05cfbc65d1b36a8be7c0bcbda30a1d43"}, + {file = "tree_sitter-0.20.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d933a942fde39876b99c36f12aa3764e4a555ae9366c10ce6cca8c16341c1bbf"}, + {file = "tree_sitter-0.20.4-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a7eec3b55135fe851a38fa248c9fd75fc3d58ceb6e1865b795e416e4d598c2a1"}, + {file = "tree_sitter-0.20.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc76225529ee14a53e84413480ce81ec3c44eaa0455c140e961c90ac3118ead"}, + {file = "tree_sitter-0.20.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf0396e47efffc0b528959a8f2e2346a98297579f867e9e1834c2aad4be829c"}, + {file = "tree_sitter-0.20.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:a15fbabd3bc8e29c48289c156d743e69f5ec72bb125cf44f7adbdaa1937c3da6"}, + {file = "tree_sitter-0.20.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:36f8adf2126f496cf376b6e4b707cba061c25beb17841727eef6f0e083e53e1f"}, + {file = "tree_sitter-0.20.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:841efb40c116ab0a066924925409a8a4dcffeb39a151c0b2a1c2abe56ad4fb42"}, + {file = "tree_sitter-0.20.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2051e8a70fd8426f27a43dad71d11929a62ce30a9b1eb65bba0ed79e82481592"}, + {file = "tree_sitter-0.20.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:99a3c2824d4cfcffd9f961176891426bde2cb36ece5280c61480be93319c23c4"}, + {file = "tree_sitter-0.20.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:72830dc85a10430eca3d56739b7efcd7a05459c8d425f08c1aee6179ab7f13a9"}, + {file = "tree_sitter-0.20.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4992dd226055b6cd0a4f5661c66b799a73d3eff716302e0f7ab06594ee12d49f"}, + {file = "tree_sitter-0.20.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a66d95bbf92175cdc295d6d77f330942811f02e3aaf3fc64431cb749683b2f7d"}, + {file = "tree_sitter-0.20.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a25b1087e4f7825b2458dacf5f4b0be2938f78e850e822edca1ff4994b56081a"}, + {file = "tree_sitter-0.20.4.tar.gz", hash = "sha256:6adb123e2f3e56399bbf2359924633c882cc40ee8344885200bca0922f713be5"}, +] + +[package.dependencies] +setuptools = {version = ">=60.0.0", markers = "python_version >= \"3.12\""} + +[[package]] +name = "tree-sitter-languages" +version = "1.10.2" +description = "Binary Python wheels for all tree sitter languages." 
+optional = true +python-versions = "*" +files = [ + {file = "tree_sitter_languages-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5580348f0b20233b1d5431fa178ccd3d07423ca4a3275df02a44608fd72344b9"}, + {file = "tree_sitter_languages-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:103c7466644486b1e9e03850df46fc6aa12f13ca636c74f173270276220ac80b"}, + {file = "tree_sitter_languages-1.10.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d13db84511c6f1a7dc40383b66deafa74dabd8b877e3d65ab253f3719eccafd6"}, + {file = "tree_sitter_languages-1.10.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57adfa32be7e465b54aa72f915f6c78a2b66b227df4f656b5d4fbd1ca7a92b3f"}, + {file = "tree_sitter_languages-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c6385e033e460ceb8f33f3f940335f422ef2b763700a04f0089391a68b56153"}, + {file = "tree_sitter_languages-1.10.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dfa3f38cc5381c5aba01dd7494f59b8a9050e82ff6e06e1233e3a0cbae297e3c"}, + {file = "tree_sitter_languages-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9f195155acf47f8bc5de7cee46ecd07b2f5697f007ba89435b51ef4c0b953ea5"}, + {file = "tree_sitter_languages-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2de330e2ac6d7426ca025a3ec0f10d5640c3682c1d0c7702e812dcfb44b58120"}, + {file = "tree_sitter_languages-1.10.2-cp310-cp310-win32.whl", hash = "sha256:c9731cf745f135d9770eeba9bb4e2ff4dabc107b5ae9b8211e919f6b9100ea6d"}, + {file = "tree_sitter_languages-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:6dd75851c41d0c3c4987a9b7692d90fa8848706c23115669d8224ffd6571e357"}, + {file = "tree_sitter_languages-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7eb7d7542b2091c875fe52719209631fca36f8c10fa66970d2c576ae6a1b8289"}, + {file = "tree_sitter_languages-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6b41bcb00974b1c8a1800c7f1bb476a1d15a0463e760ee24872f2d53b08ee424"}, + {file = "tree_sitter_languages-1.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f370cd7845c6c81df05680d5bd96db8a99d32b56f4728c5d05978911130a853"}, + {file = "tree_sitter_languages-1.10.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1dc195c88ef4c72607e112a809a69190e096a2e5ebc6201548b3e05fdd169ad"}, + {file = "tree_sitter_languages-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ae34ac314a7170be24998a0f994c1ac80761d8d4bd126af27ee53a023d3b849"}, + {file = "tree_sitter_languages-1.10.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:01b5742d5f5bd675489486b582bd482215880b26dde042c067f8265a6e925d9c"}, + {file = "tree_sitter_languages-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ab1cbc46244d34fd16f21edaa20231b2a57f09f092a06ee3d469f3117e6eb954"}, + {file = "tree_sitter_languages-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b1149e7467a4e92b8a70e6005fe762f880f493cf811fc003554b29f04f5e7c8"}, + {file = "tree_sitter_languages-1.10.2-cp311-cp311-win32.whl", hash = "sha256:049276343962f4696390ee555acc2c1a65873270c66a6cbe5cb0bca83bcdf3c6"}, + {file = "tree_sitter_languages-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:7f3fdd468a577f04db3b63454d939e26e360229b53c80361920aa1ebf2cd7491"}, + {file = "tree_sitter_languages-1.10.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c0f4c8b2734c45859edc7fcaaeaab97a074114111b5ba51ab4ec7ed52104763c"}, + {file = 
"tree_sitter_languages-1.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:eecd3c1244ac3425b7a82ba9125b4ddb45d953bbe61de114c0334fd89b7fe782"}, + {file = "tree_sitter_languages-1.10.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15db3c8510bc39a80147ee7421bf4782c15c09581c1dc2237ea89cefbd95b846"}, + {file = "tree_sitter_languages-1.10.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92c6487a6feea683154d3e06e6db68c30e0ae749a7ce4ce90b9e4e46b78c85c7"}, + {file = "tree_sitter_languages-1.10.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2f1cd1d1bdd65332f9c2b67d49dcf148cf1ded752851d159ac3e5ee4f4d260"}, + {file = "tree_sitter_languages-1.10.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:976c8039165b8e12f17a01ddee9f4e23ec6e352b165ad29b44d2bf04e2fbe77e"}, + {file = "tree_sitter_languages-1.10.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:dafbbdf16bf668a580902e1620f4baa1913e79438abcce721a50647564c687b9"}, + {file = "tree_sitter_languages-1.10.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1aeabd3d60d6d276b73cd8f3739d595b1299d123cc079a317f1a5b3c5461e2ca"}, + {file = "tree_sitter_languages-1.10.2-cp312-cp312-win32.whl", hash = "sha256:fab8ee641914098e8933b87ea3d657bea4dd00723c1ee7038b847b12eeeef4f5"}, + {file = "tree_sitter_languages-1.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:5e606430d736367e5787fa5a7a0c5a1ec9b85eded0b3596bbc0d83532a40810b"}, + {file = "tree_sitter_languages-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:838d5b48a7ed7a17658721952c77fda4570d2a069f933502653b17e15a9c39c9"}, + {file = "tree_sitter_languages-1.10.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:987b3c71b1d278c2889e018ee77b8ee05c384e2e3334dec798f8b611c4ab2d1e"}, + {file = "tree_sitter_languages-1.10.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:faa00abcb2c819027df58472da055d22fa7dfcb77c77413d8500c32ebe24d38b"}, + {file = "tree_sitter_languages-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e102fbbf02322d9201a86a814e79a9734ac80679fdb9682144479044f401a73"}, + {file = "tree_sitter_languages-1.10.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8f0b87cf1a7b03174ba18dfd81582be82bfed26803aebfe222bd20e444aba003"}, + {file = "tree_sitter_languages-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c0f1b9af9cb67f0b942b020da9fdd000aad5e92f2383ae0ba7a330b318d31912"}, + {file = "tree_sitter_languages-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:5a4076c921f7a4d31e643843de7dfe040b65b63a238a5aa8d31d93aabe6572aa"}, + {file = "tree_sitter_languages-1.10.2-cp37-cp37m-win32.whl", hash = "sha256:fa6391a3a5d83d32db80815161237b67d70576f090ce5f38339206e917a6f8bd"}, + {file = "tree_sitter_languages-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:55649d3f254585a064121513627cf9788c1cfdadbc5f097f33d5ba750685a4c0"}, + {file = "tree_sitter_languages-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6f85d1edaa2d22d80d4ea5b6d12b95cf3644017b6c227d0d42854439e02e8893"}, + {file = "tree_sitter_languages-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d78feed4a764ef3141cb54bf00fe94d514d8b6e26e09423e23b4c616fcb7938c"}, + {file = "tree_sitter_languages-1.10.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da1aca27531f9dd5308637d76643372856f0f65d0d28677d1bcf4211e8ed1ad0"}, + {file = 
"tree_sitter_languages-1.10.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1031ea440dafb72237437d754eff8940153a3b051e3d18932ac25e75ce060a15"}, + {file = "tree_sitter_languages-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99d3249beaef2c9fe558ecc9a97853c260433a849dcc68266d9770d196c2e102"}, + {file = "tree_sitter_languages-1.10.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:59a4450f262a55148fb7e68681522f0c2a2f6b7d89666312a2b32708d8f416e1"}, + {file = "tree_sitter_languages-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ce74eab0e430370d5e15a96b6c6205f93405c177a8b2e71e1526643b2fb9bab1"}, + {file = "tree_sitter_languages-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9b4dd2b6b3d24c85dffe33d6c343448869eaf4f41c19ddba662eb5d65d8808f4"}, + {file = "tree_sitter_languages-1.10.2-cp38-cp38-win32.whl", hash = "sha256:92d734fb968fe3927a7596d9f0459f81a8fa7b07e16569476b28e27d0d753348"}, + {file = "tree_sitter_languages-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:46a13f7d38f2eeb75f7cf127d1201346093748c270d686131f0cbc50e42870a1"}, + {file = "tree_sitter_languages-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f8c6a936ae99fdd8857e91f86c11c2f5e507ff30631d141d98132bb7ab2c8638"}, + {file = "tree_sitter_languages-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c283a61423f49cdfa7b5a5dfbb39221e3bd126fca33479cd80749d4d7a6b7349"}, + {file = "tree_sitter_languages-1.10.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e60be6bdcff923386a54a5edcb6ff33fc38ab0118636a762024fa2bc98de55"}, + {file = "tree_sitter_languages-1.10.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c00069f9575bd831eabcce2cdfab158dde1ed151e7e5614c2d985ff7d78a7de1"}, + {file = "tree_sitter_languages-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:475ff53203d8a43ccb19bb322fa2fb200d764001cc037793f1fadd714bb343da"}, + {file = "tree_sitter_languages-1.10.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26fe7c9c412e4141dea87ea4b3592fd12e385465b5bdab106b0d5125754d4f60"}, + {file = "tree_sitter_languages-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8fed27319957458340f24fe14daad467cd45021da034eef583519f83113a8c5e"}, + {file = "tree_sitter_languages-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3657a491a7f96cc75a3568ddd062d25f3be82b6a942c68801a7b226ff7130181"}, + {file = "tree_sitter_languages-1.10.2-cp39-cp39-win32.whl", hash = "sha256:33f7d584d01a7a3c893072f34cfc64ec031f3cfe57eebc32da2f8ac046e101a7"}, + {file = "tree_sitter_languages-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:1b944af3ee729fa70fc8ae82224a9ff597cdb63addea084e0ea2fa2b0ec39bb7"}, +] + +[package.dependencies] +tree-sitter = "*" + [[package]] name = "typer" version = "0.9.0" @@ -8998,9 +9157,9 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [extras] cli = ["typer"] -extended-testing = ["aiosqlite", "aleph-alpha-client", "anthropic", "arxiv", "assemblyai", "atlassian-python-api", "azure-ai-documentintelligence", "beautifulsoup4", "bibtexparser", "cassio", "chardet", "cohere", "databricks-vectorsearch", "datasets", "dgml-utils", "elasticsearch", "esprima", "faiss-cpu", "feedparser", "fireworks-ai", "geopandas", "gitpython", "google-cloud-documentai", "gql", "gradientai", "hdbcli", "hologres-vector", "html2text", "httpx", "javelin-sdk", "jinja2", "jq", "jsonschema", "lxml", "markdownify", "motor", "msal", "mwparserfromhell", "mwxml", 
"newspaper3k", "numexpr", "nvidia-riva-client", "oci", "openai", "openapi-pydantic", "oracle-ads", "pandas", "pdfminer-six", "pgvector", "praw", "psychicapi", "py-trello", "pymupdf", "pypdf", "pypdfium2", "pyspark", "rank-bm25", "rapidfuzz", "rapidocr-onnxruntime", "rdflib", "requests-toolbelt", "rspace_client", "scikit-learn", "sqlite-vss", "streamlit", "sympy", "telethon", "timescale-vector", "tqdm", "upstash-redis", "xata", "xmltodict", "zhipuai"] +extended-testing = ["aiosqlite", "aleph-alpha-client", "anthropic", "arxiv", "assemblyai", "atlassian-python-api", "azure-ai-documentintelligence", "beautifulsoup4", "bibtexparser", "cassio", "chardet", "cohere", "databricks-vectorsearch", "datasets", "dgml-utils", "elasticsearch", "esprima", "faiss-cpu", "feedparser", "fireworks-ai", "geopandas", "gitpython", "google-cloud-documentai", "gql", "gradientai", "hdbcli", "hologres-vector", "html2text", "httpx", "javelin-sdk", "jinja2", "jq", "jsonschema", "lxml", "markdownify", "motor", "msal", "mwparserfromhell", "mwxml", "newspaper3k", "numexpr", "nvidia-riva-client", "oci", "openai", "openapi-pydantic", "oracle-ads", "pandas", "pdfminer-six", "pgvector", "praw", "psychicapi", "py-trello", "pymupdf", "pypdf", "pypdfium2", "pyspark", "rank-bm25", "rapidfuzz", "rapidocr-onnxruntime", "rdflib", "requests-toolbelt", "rspace_client", "scikit-learn", "sqlite-vss", "streamlit", "sympy", "telethon", "timescale-vector", "tqdm", "tree-sitter", "tree-sitter-languages", "upstash-redis", "xata", "xmltodict", "zhipuai"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "fe633ab7d246239420a26bebf8bcf857edcf0778f75b3eb2a4b1314cb13645c8" +content-hash = "5fdd9b2eb766411463fa27e19433daf5d5325f2af01ddd93b6a594e3e02a31de" diff --git a/libs/community/pyproject.toml b/libs/community/pyproject.toml index ac6b9a3a9940d..abf8dc89b6384 100644 --- a/libs/community/pyproject.toml +++ b/libs/community/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain-community" -version = "0.0.19" +version = "0.0.21" description = "Community contributed LangChain integrations." authors = [] license = "MIT" @@ -9,7 +9,7 @@ repository = "https://github.com/langchain-ai/langchain" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -langchain-core = ">=0.1.21,<0.2" +langchain-core = ">=0.1.24,<0.2" SQLAlchemy = ">=1.4,<3" requests = "^2" PyYAML = ">=5.3" @@ -17,7 +17,7 @@ numpy = "^1" aiohttp = "^3.8.3" tenacity = "^8.1.0" dataclasses-json = ">= 0.5.7, < 0.7" -langsmith = ">=0.0.83,<0.1" +langsmith = "^0.1.0" tqdm = {version = ">=4.48.0", optional = true} openapi-pydantic = {version = "^0.3.2", optional = true} faiss-cpu = {version = "^1", optional = true} @@ -83,6 +83,8 @@ msal = {version = "^1.25.0", optional = true} databricks-vectorsearch = {version = "^0.21", optional = true} dgml-utils = {version = "^0.3.0", optional = true} datasets = {version = "^2.15.0", optional = true} +tree-sitter = {version = "^0.20.2", optional = true} +tree-sitter-languages = {version = "^1.8.0", optional = true} azure-ai-documentintelligence = {version = "^1.0.0b1", optional = true} oracle-ads = {version = "^2.9.1", optional = true} zhipuai = {version = "^1.0.7", optional = true} @@ -177,7 +179,6 @@ setuptools = "^67.6.1" langchain-core = {path = "../core", develop = true} [tool.poetry.extras] - cli = ["typer"] # An extra used to be able to add extended testing. 
@@ -249,6 +250,8 @@ extended_testing = [ "databricks-vectorsearch", "dgml-utils", "cohere", + "tree-sitter", + "tree-sitter-languages", "azure-ai-documentintelligence", "oracle-ads", "zhipuai", diff --git a/libs/community/tests/examples/test_empty.csv b/libs/community/tests/examples/test_empty.csv new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/community/tests/examples/test_nominal.csv b/libs/community/tests/examples/test_nominal.csv new file mode 100644 index 0000000000000..65debb11207c2 --- /dev/null +++ b/libs/community/tests/examples/test_nominal.csv @@ -0,0 +1,3 @@ +column1,column2,column3 +value1,value2,value3 +value4,value5,value6 \ No newline at end of file diff --git a/libs/community/tests/integration_tests/chat_models/test_yuan2.py b/libs/community/tests/integration_tests/chat_models/test_yuan2.py new file mode 100644 index 0000000000000..17a1c40a0792a --- /dev/null +++ b/libs/community/tests/integration_tests/chat_models/test_yuan2.py @@ -0,0 +1,152 @@ +"""Test ChatYuan2 wrapper.""" +from typing import List + +import pytest +from langchain_core.callbacks import CallbackManager +from langchain_core.messages import BaseMessage, HumanMessage, SystemMessage +from langchain_core.outputs import ( + ChatGeneration, + LLMResult, +) + +from langchain_community.chat_models.yuan2 import ChatYuan2 +from tests.unit_tests.callbacks.fake_callback_handler import FakeCallbackHandler + + +@pytest.mark.scheduled +def test_chat_yuan2() -> None: + """Test ChatYuan2 wrapper.""" + chat = ChatYuan2( + yuan2_api_key="EMPTY", + yuan2_api_base="http://127.0.0.1:8001/v1", + temperature=1.0, + model_name="yuan2", + max_retries=3, + streaming=False, + ) + messages = [ + HumanMessage(content="Hello"), + ] + response = chat(messages) + assert isinstance(response, BaseMessage) + assert isinstance(response.content, str) + + +def test_chat_yuan2_system_message() -> None: + """Test ChatYuan2 wrapper with system message.""" + chat = ChatYuan2( + yuan2_api_key="EMPTY", + yuan2_api_base="http://127.0.0.1:8001/v1", + temperature=1.0, + model_name="yuan2", + max_retries=3, + streaming=False, + ) + messages = [ + SystemMessage(content="You are an AI assistant."), + HumanMessage(content="Hello"), + ] + response = chat(messages) + assert isinstance(response, BaseMessage) + assert isinstance(response.content, str) + + +@pytest.mark.scheduled +def test_chat_yuan2_generate() -> None: + """Test ChatYuan2 wrapper with generate.""" + chat = ChatYuan2( + yuan2_api_key="EMPTY", + yuan2_api_base="http://127.0.0.1:8001/v1", + temperature=1.0, + model_name="yuan2", + max_retries=3, + streaming=False, + ) + messages: List = [ + HumanMessage(content="Hello"), + ] + response = chat.generate([messages]) + assert isinstance(response, LLMResult) + assert len(response.generations) == 1 + assert response.llm_output + generation = response.generations[0] + for gen in generation: + assert isinstance(gen, ChatGeneration) + assert isinstance(gen.text, str) + assert gen.text == gen.message.content + + +@pytest.mark.scheduled +def test_chat_yuan2_streaming() -> None: + """Test that streaming correctly invokes on_llm_new_token callback.""" + callback_handler = FakeCallbackHandler() + callback_manager = CallbackManager([callback_handler]) + + chat = ChatYuan2( + yuan2_api_key="EMPTY", + yuan2_api_base="http://127.0.0.1:8001/v1", + temperature=1.0, + model_name="yuan2", + max_retries=3, + streaming=True, + callback_manager=callback_manager, + ) + messages = [ + HumanMessage(content="Hello"), + ] + response = chat(messages) 
+ assert callback_handler.llm_streams > 0 + assert isinstance(response, BaseMessage) + + +@pytest.mark.asyncio +async def test_async_chat_yuan2() -> None: + """Test async generation.""" + chat = ChatYuan2( + yuan2_api_key="EMPTY", + yuan2_api_base="http://127.0.0.1:8001/v1", + temperature=1.0, + model_name="yuan2", + max_retries=3, + streaming=False, + ) + messages: List = [ + HumanMessage(content="Hello"), + ] + response = await chat.agenerate([messages]) + assert isinstance(response, LLMResult) + assert len(response.generations) == 1 + generations = response.generations[0] + for generation in generations: + assert isinstance(generation, ChatGeneration) + assert isinstance(generation.text, str) + assert generation.text == generation.message.content + + +@pytest.mark.asyncio +async def test_async_chat_yuan2_streaming() -> None: + """Test that streaming correctly invokes on_llm_new_token callback.""" + callback_handler = FakeCallbackHandler() + callback_manager = CallbackManager([callback_handler]) + + chat = ChatYuan2( + yuan2_api_key="EMPTY", + yuan2_api_base="http://127.0.0.1:8001/v1", + temperature=1.0, + model_name="yuan2", + max_retries=3, + streaming=True, + callback_manager=callback_manager, + ) + messages: List = [ + HumanMessage(content="Hello"), + ] + response = await chat.agenerate([messages]) + assert callback_handler.llm_streams > 0 + assert isinstance(response, LLMResult) + assert len(response.generations) == 1 + generations = response.generations[0] + for generation in generations: + assert isinstance(generation, ChatGeneration) + assert isinstance(generation.text, str) + assert generation.text == generation.message.content diff --git a/libs/community/tests/integration_tests/document_loaders/test_astradb.py b/libs/community/tests/integration_tests/document_loaders/test_astradb.py index 8f9146aacb51f..b0a1104f82316 100644 --- a/libs/community/tests/integration_tests/document_loaders/test_astradb.py +++ b/libs/community/tests/integration_tests/document_loaders/test_astradb.py @@ -15,7 +15,7 @@ import json import os import uuid -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, AsyncIterator, Iterator import pytest @@ -37,12 +37,12 @@ def _has_env_vars() -> bool: @pytest.fixture -def astra_db_collection() -> AstraDBCollection: +def astra_db_collection() -> Iterator[AstraDBCollection]: from astrapy.db import AstraDB astra_db = AstraDB( - token=ASTRA_DB_APPLICATION_TOKEN, - api_endpoint=ASTRA_DB_API_ENDPOINT, + token=ASTRA_DB_APPLICATION_TOKEN or "", + api_endpoint=ASTRA_DB_API_ENDPOINT or "", namespace=ASTRA_DB_KEYSPACE, ) collection_name = f"lc_test_loader_{str(uuid.uuid4()).split('-')[0]}" @@ -58,12 +58,12 @@ def astra_db_collection() -> AstraDBCollection: @pytest.fixture -async def async_astra_db_collection() -> AsyncAstraDBCollection: +async def async_astra_db_collection() -> AsyncIterator[AsyncAstraDBCollection]: from astrapy.db import AsyncAstraDB astra_db = AsyncAstraDB( - token=ASTRA_DB_APPLICATION_TOKEN, - api_endpoint=ASTRA_DB_API_ENDPOINT, + token=ASTRA_DB_APPLICATION_TOKEN or "", + api_endpoint=ASTRA_DB_API_ENDPOINT or "", namespace=ASTRA_DB_KEYSPACE, ) collection_name = f"lc_test_loader_{str(uuid.uuid4()).split('-')[0]}" @@ -167,5 +167,5 @@ async def test_extraction_function_async( find_options={"limit": 30}, extraction_function=lambda x: x["foo"], ) - doc = await anext(loader.alazy_load()) # type: ignore[name-defined] + doc = await loader.alazy_load().__anext__() assert doc.page_content == "bar" diff --git 
a/libs/community/tests/integration_tests/document_loaders/test_cassandra.py b/libs/community/tests/integration_tests/document_loaders/test_cassandra.py index 5562188eced86..a93a6abba6824 100644 --- a/libs/community/tests/integration_tests/document_loaders/test_cassandra.py +++ b/libs/community/tests/integration_tests/document_loaders/test_cassandra.py @@ -2,7 +2,7 @@ Test of Cassandra document loader class `CassandraLoader` """ import os -from typing import Any +from typing import Any, Iterator import pytest from langchain_core.documents import Document @@ -14,7 +14,7 @@ @pytest.fixture(autouse=True, scope="session") -def keyspace() -> str: # type: ignore[misc] +def keyspace() -> Iterator[str]: import cassio from cassandra.cluster import Cluster from cassio.config import check_resolve_session, resolve_keyspace diff --git a/libs/community/tests/integration_tests/document_loaders/test_github.py b/libs/community/tests/integration_tests/document_loaders/test_github.py index a3ad86a0f99cf..a711e2c142c5b 100644 --- a/libs/community/tests/integration_tests/document_loaders/test_github.py +++ b/libs/community/tests/integration_tests/document_loaders/test_github.py @@ -2,11 +2,17 @@ def test_issues_load() -> None: - title = "DocumentLoader for GitHub" + title = " Add caching to BaseChatModel (issue #1644)" loader = GitHubIssuesLoader( - repo="langchain-ai/langchain", creator="UmerHA", state="all" + repo="langchain-ai/langchain", + creator="UmerHA", + state="all", + per_page=3, + page=2, + access_token="""""", ) docs = loader.load() titles = [d.metadata["title"] for d in docs] assert title in titles assert all(doc.metadata["creator"] == "UmerHA" for doc in docs) + assert len(docs) == 3 diff --git a/libs/community/tests/integration_tests/graphs/test_kuzu.py b/libs/community/tests/integration_tests/graphs/test_kuzu.py index 10a35f2175ea1..20d002d55d61c 100644 --- a/libs/community/tests/integration_tests/graphs/test_kuzu.py +++ b/libs/community/tests/integration_tests/graphs/test_kuzu.py @@ -4,8 +4,7 @@ from langchain_community.graphs import KuzuGraph -EXPECTED_SCHEMA = """ -Node properties: [{'properties': [('name', 'STRING')], 'label': 'Movie'}, {'properties': [('name', 'STRING'), ('birthDate', 'STRING')], 'label': 'Person'}] +EXPECTED_SCHEMA = """Node properties: [{'properties': [('name', 'STRING')], 'label': 'Movie'}, {'properties': [('name', 'STRING'), ('birthDate', 'STRING')], 'label': 'Person'}] Relationships properties: [{'properties': [], 'label': 'ActedIn'}] Relationships: ['(:Person)-[:ActedIn]->(:Movie)'] """ # noqa: E501 @@ -36,7 +35,7 @@ def setUp(self) -> None: def tearDown(self) -> None: shutil.rmtree(self.tmpdir, ignore_errors=True) - def test_query(self) -> None: + def test_query_no_params(self) -> None: result = self.kuzu_graph.query("MATCH (n:Movie) RETURN n.name ORDER BY n.name") excepted_result = [ {"n.name": "The Godfather"}, @@ -45,6 +44,16 @@ def test_query(self) -> None: ] self.assertEqual(result, excepted_result) + def test_query_params(self) -> None: + result = self.kuzu_graph.query( + query="MATCH (n:Movie) WHERE n.name = $name RETURN n.name", + params={"name": "The Godfather"}, + ) + excepted_result = [ + {"n.name": "The Godfather"}, + ] + self.assertEqual(result, excepted_result) + def test_refresh_schema(self) -> None: self.conn.execute( "CREATE NODE TABLE Person (name STRING, birthDate STRING, PRIMARY " diff --git a/libs/community/tests/integration_tests/llms/test_huggingface_endpoint.py b/libs/community/tests/integration_tests/llms/test_huggingface_endpoint.py index 
ca89d54cde775..11af7df374269 100644 --- a/libs/community/tests/integration_tests/llms/test_huggingface_endpoint.py +++ b/libs/community/tests/integration_tests/llms/test_huggingface_endpoint.py @@ -1,6 +1,5 @@ -"""Test HuggingFace API wrapper.""" +"""Test HuggingFace Endpoints.""" -import unittest from pathlib import Path import pytest @@ -10,51 +9,73 @@ from tests.integration_tests.llms.utils import assert_llm_equality -@unittest.skip( - "This test requires an inference endpoint. Tested with Hugging Face endpoints" -) -def test_huggingface_endpoint_text_generation() -> None: - """Test valid call to HuggingFace text generation model.""" +def test_huggingface_endpoint_call_error() -> None: + """Test valid call to HuggingFace that errors.""" + llm = HuggingFaceEndpoint(endpoint_url="", model_kwargs={"max_new_tokens": -1}) + with pytest.raises(ValueError): + llm("Say foo:") + + +def test_saving_loading_endpoint_llm(tmp_path: Path) -> None: + """Test saving/loading an HuggingFaceHub LLM.""" llm = HuggingFaceEndpoint( endpoint_url="", task="text-generation", model_kwargs={"max_new_tokens": 10} ) + llm.save(file_path=tmp_path / "hf.yaml") + loaded_llm = load_llm(tmp_path / "hf.yaml") + assert_llm_equality(llm, loaded_llm) + + +def test_huggingface_text_generation() -> None: + """Test valid call to HuggingFace text generation model.""" + llm = HuggingFaceEndpoint(repo_id="gpt2", model_kwargs={"max_new_tokens": 10}) output = llm("Say foo:") print(output) # noqa: T201 assert isinstance(output, str) -@unittest.skip( - "This test requires an inference endpoint. Tested with Hugging Face endpoints" -) -def test_huggingface_endpoint_text2text_generation() -> None: +def test_huggingface_text2text_generation() -> None: """Test valid call to HuggingFace text2text model.""" - llm = HuggingFaceEndpoint(endpoint_url="", task="text2text-generation") + llm = HuggingFaceEndpoint(repo_id="google/flan-t5-xl") output = llm("The capital of New York is") assert output == "Albany" -@unittest.skip( - "This test requires an inference endpoint. 
Tested with Hugging Face endpoints" -) -def test_huggingface_endpoint_summarization() -> None: +def test_huggingface_summarization() -> None: """Test valid call to HuggingFace summarization model.""" - llm = HuggingFaceEndpoint(endpoint_url="", task="summarization") + llm = HuggingFaceEndpoint(repo_id="facebook/bart-large-cnn") output = llm("Say foo:") assert isinstance(output, str) -def test_huggingface_endpoint_call_error() -> None: +def test_huggingface_call_error() -> None: """Test valid call to HuggingFace that errors.""" - llm = HuggingFaceEndpoint(model_kwargs={"max_new_tokens": -1}) + llm = HuggingFaceEndpoint(repo_id="gpt2", model_kwargs={"max_new_tokens": -1}) with pytest.raises(ValueError): llm("Say foo:") -def test_saving_loading_endpoint_llm(tmp_path: Path) -> None: - """Test saving/loading an HuggingFaceHub LLM.""" - llm = HuggingFaceEndpoint( - endpoint_url="", task="text-generation", model_kwargs={"max_new_tokens": 10} - ) +def test_saving_loading_llm(tmp_path: Path) -> None: + """Test saving/loading an HuggingFaceEndpoint LLM.""" + llm = HuggingFaceEndpoint(repo_id="gpt2", model_kwargs={"max_new_tokens": 10}) llm.save(file_path=tmp_path / "hf.yaml") loaded_llm = load_llm(tmp_path / "hf.yaml") assert_llm_equality(llm, loaded_llm) + + +def test_invocation_params_stop_sequences() -> None: + llm = HuggingFaceEndpoint() + assert llm._default_params["stop_sequences"] == [] + + runtime_stop = None + assert llm._invocation_params(runtime_stop)["stop_sequences"] == [] + assert llm._default_params["stop_sequences"] == [] + + runtime_stop = ["stop"] + assert llm._invocation_params(runtime_stop)["stop_sequences"] == ["stop"] + assert llm._default_params["stop_sequences"] == [] + + llm = HuggingFaceEndpoint(stop_sequences=["."]) + runtime_stop = ["stop"] + assert llm._invocation_params(runtime_stop)["stop_sequences"] == [".", "stop"] + assert llm._default_params["stop_sequences"] == ["."] diff --git a/libs/community/tests/integration_tests/llms/test_llamafile.py b/libs/community/tests/integration_tests/llms/test_llamafile.py new file mode 100644 index 0000000000000..0f9c66c182296 --- /dev/null +++ b/libs/community/tests/integration_tests/llms/test_llamafile.py @@ -0,0 +1,46 @@ +import os +from typing import Generator + +import pytest +import requests +from requests.exceptions import ConnectionError, HTTPError + +from langchain_community.llms.llamafile import Llamafile + +LLAMAFILE_SERVER_BASE_URL = os.getenv( + "LLAMAFILE_SERVER_BASE_URL", "http://localhost:8080" +) + + +def _ping_llamafile_server() -> bool: + try: + response = requests.get(LLAMAFILE_SERVER_BASE_URL) + response.raise_for_status() + except (ConnectionError, HTTPError): + return False + + return True + + +@pytest.mark.skipif( + not _ping_llamafile_server(), + reason=f"unable to find llamafile server at {LLAMAFILE_SERVER_BASE_URL}, " + f"please start one and re-run this test", +) +def test_llamafile_call() -> None: + llm = Llamafile() + output = llm.invoke("Say foo:") + assert isinstance(output, str) + + +@pytest.mark.skipif( + not _ping_llamafile_server(), + reason=f"unable to find llamafile server at {LLAMAFILE_SERVER_BASE_URL}, " + f"please start one and re-run this test", +) +def test_llamafile_streaming() -> None: + llm = Llamafile(streaming=True) + generator = llm.stream("Tell me about Roman dodecahedrons.") + assert isinstance(generator, Generator) + for token in generator: + assert isinstance(token, str) diff --git a/libs/community/tests/integration_tests/llms/test_yuan2.py 
b/libs/community/tests/integration_tests/llms/test_yuan2.py new file mode 100644 index 0000000000000..94667819e66ad --- /dev/null +++ b/libs/community/tests/integration_tests/llms/test_yuan2.py @@ -0,0 +1,33 @@ +"""Test Yuan2.0 API wrapper.""" +from langchain_core.outputs import LLMResult + +from langchain_community.llms import Yuan2 + + +def test_yuan2_call_method() -> None: + """Test valid call to Yuan2.0.""" + llm = Yuan2( + infer_api="http://127.0.0.1:8000/yuan", + max_tokens=1024, + temp=1.0, + top_p=0.9, + top_k=40, + use_history=False, + ) + output = llm("ๅ†™ไธ€ๆฎตๅฟซ้€ŸๆŽ’ๅบ็ฎ—ๆณ•ใ€‚") + assert isinstance(output, str) + + +def test_yuan2_generate_method() -> None: + """Test valid call to Yuan2.0 inference api.""" + llm = Yuan2( + infer_api="http://127.0.0.1:8000/yuan", + max_tokens=1024, + temp=1.0, + top_p=0.9, + top_k=40, + use_history=False, + ) + output = llm.generate(["who are you?"]) + assert isinstance(output, LLMResult) + assert isinstance(output.generations, list) diff --git a/libs/community/tests/integration_tests/storage/test_astradb.py b/libs/community/tests/integration_tests/storage/test_astradb.py index 643b4e93a3185..63108ef0c84a7 100644 --- a/libs/community/tests/integration_tests/storage/test_astradb.py +++ b/libs/community/tests/integration_tests/storage/test_astradb.py @@ -1,9 +1,16 @@ """Implement integration tests for AstraDB storage.""" +from __future__ import annotations + import os +from typing import TYPE_CHECKING import pytest from langchain_community.storage.astradb import AstraDBByteStore, AstraDBStore +from langchain_community.utilities.astradb import SetupMode + +if TYPE_CHECKING: + from astrapy.db import AstraDB, AsyncAstraDB def _has_env_vars() -> bool: @@ -16,7 +23,7 @@ def _has_env_vars() -> bool: @pytest.fixture -def astra_db(): # type: ignore[no-untyped-def] +def astra_db() -> AstraDB: from astrapy.db import AstraDB return AstraDB( @@ -26,24 +33,45 @@ def astra_db(): # type: ignore[no-untyped-def] ) -def init_store(astra_db, collection_name: str): # type: ignore[no-untyped-def, no-untyped-def] - astra_db.create_collection(collection_name) +@pytest.fixture +def async_astra_db() -> AsyncAstraDB: + from astrapy.db import AsyncAstraDB + + return AsyncAstraDB( + token=os.environ["ASTRA_DB_APPLICATION_TOKEN"], + api_endpoint=os.environ["ASTRA_DB_API_ENDPOINT"], + namespace=os.environ.get("ASTRA_DB_KEYSPACE"), + ) + + +def init_store(astra_db: AstraDB, collection_name: str) -> AstraDBStore: store = AstraDBStore(collection_name=collection_name, astra_db_client=astra_db) store.mset([("key1", [0.1, 0.2]), ("key2", "value2")]) return store -def init_bytestore(astra_db, collection_name: str): # type: ignore[no-untyped-def, no-untyped-def] - astra_db.create_collection(collection_name) +def init_bytestore(astra_db: AstraDB, collection_name: str) -> AstraDBByteStore: store = AstraDBByteStore(collection_name=collection_name, astra_db_client=astra_db) store.mset([("key1", b"value1"), ("key2", b"value2")]) return store +async def init_async_store( + async_astra_db: AsyncAstraDB, collection_name: str +) -> AstraDBStore: + store = AstraDBStore( + collection_name=collection_name, + async_astra_db_client=async_astra_db, + setup_mode=SetupMode.ASYNC, + ) + await store.amset([("key1", [0.1, 0.2]), ("key2", "value2")]) + return store + + @pytest.mark.requires("astrapy") @pytest.mark.skipif(not _has_env_vars(), reason="Missing Astra DB env. 
vars") class TestAstraDBStore: - def test_mget(self, astra_db) -> None: # type: ignore[no-untyped-def] + def test_mget(self, astra_db: AstraDB) -> None: """Test AstraDBStore mget method.""" collection_name = "lc_test_store_mget" try: @@ -52,7 +80,16 @@ def test_mget(self, astra_db) -> None: # type: ignore[no-untyped-def] finally: astra_db.delete_collection(collection_name) - def test_mset(self, astra_db) -> None: # type: ignore[no-untyped-def] + async def test_amget(self, async_astra_db: AsyncAstraDB) -> None: + """Test AstraDBStore amget method.""" + collection_name = "lc_test_store_mget" + try: + store = await init_async_store(async_astra_db, collection_name) + assert await store.amget(["key1", "key2"]) == [[0.1, 0.2], "value2"] + finally: + await async_astra_db.delete_collection(collection_name) + + def test_mset(self, astra_db: AstraDB) -> None: """Test that multiple keys can be set with AstraDBStore.""" collection_name = "lc_test_store_mset" try: @@ -64,7 +101,19 @@ def test_mset(self, astra_db) -> None: # type: ignore[no-untyped-def] finally: astra_db.delete_collection(collection_name) - def test_mdelete(self, astra_db) -> None: # type: ignore[no-untyped-def] + async def test_amset(self, async_astra_db: AsyncAstraDB) -> None: + """Test that multiple keys can be set with AstraDBStore.""" + collection_name = "lc_test_store_mset" + try: + store = await init_async_store(async_astra_db, collection_name) + result = await store.async_collection.find_one({"_id": "key1"}) + assert result["data"]["document"]["value"] == [0.1, 0.2] + result = await store.async_collection.find_one({"_id": "key2"}) + assert result["data"]["document"]["value"] == "value2" + finally: + await async_astra_db.delete_collection(collection_name) + + def test_mdelete(self, astra_db: AstraDB) -> None: """Test that deletion works as expected.""" collection_name = "lc_test_store_mdelete" try: @@ -75,7 +124,18 @@ def test_mdelete(self, astra_db) -> None: # type: ignore[no-untyped-def] finally: astra_db.delete_collection(collection_name) - def test_yield_keys(self, astra_db) -> None: # type: ignore[no-untyped-def] + async def test_amdelete(self, async_astra_db: AsyncAstraDB) -> None: + """Test that deletion works as expected.""" + collection_name = "lc_test_store_mdelete" + try: + store = await init_async_store(async_astra_db, collection_name) + await store.amdelete(["key1", "key2"]) + result = await store.amget(["key1", "key2"]) + assert result == [None, None] + finally: + await async_astra_db.delete_collection(collection_name) + + def test_yield_keys(self, astra_db: AstraDB) -> None: collection_name = "lc_test_store_yield_keys" try: store = init_store(astra_db, collection_name) @@ -85,7 +145,20 @@ def test_yield_keys(self, astra_db) -> None: # type: ignore[no-untyped-def] finally: astra_db.delete_collection(collection_name) - def test_bytestore_mget(self, astra_db) -> None: # type: ignore[no-untyped-def] + async def test_ayield_keys(self, async_astra_db: AsyncAstraDB) -> None: + collection_name = "lc_test_store_yield_keys" + try: + store = await init_async_store(async_astra_db, collection_name) + assert {key async for key in store.ayield_keys()} == {"key1", "key2"} + assert {key async for key in store.ayield_keys(prefix="key")} == { + "key1", + "key2", + } + assert {key async for key in store.ayield_keys(prefix="lang")} == set() + finally: + await async_astra_db.delete_collection(collection_name) + + def test_bytestore_mget(self, astra_db: AstraDB) -> None: """Test AstraDBByteStore mget method.""" collection_name = 
"lc_test_bytestore_mget" try: @@ -94,7 +167,7 @@ def test_bytestore_mget(self, astra_db) -> None: # type: ignore[no-untyped-def] finally: astra_db.delete_collection(collection_name) - def test_bytestore_mset(self, astra_db) -> None: # type: ignore[no-untyped-def] + def test_bytestore_mset(self, astra_db: AstraDB) -> None: """Test that multiple keys can be set with AstraDBByteStore.""" collection_name = "lc_test_bytestore_mset" try: diff --git a/libs/community/tests/integration_tests/storage/test_mongodb.py b/libs/community/tests/integration_tests/storage/test_mongodb.py new file mode 100644 index 0000000000000..44062e994e051 --- /dev/null +++ b/libs/community/tests/integration_tests/storage/test_mongodb.py @@ -0,0 +1,73 @@ +from typing import Generator + +import pytest +from langchain_core.documents import Document + +from langchain_community.storage.mongodb import MongoDBStore + +pytest.importorskip("pymongo") + + +@pytest.fixture +def mongo_store() -> Generator: + import mongomock + + # mongomock creates a mock MongoDB instance for testing purposes + with mongomock.patch(servers=(("localhost", 27017),)): + yield MongoDBStore("mongodb://localhost:27017/", "test_db", "test_collection") + + +def test_mset_and_mget(mongo_store: MongoDBStore) -> None: + doc1 = Document(page_content="doc1") + doc2 = Document(page_content="doc2") + + # Set documents in the store + mongo_store.mset([("key1", doc1), ("key2", doc2)]) + + # Get documents from the store + retrieved_docs = mongo_store.mget(["key1", "key2"]) + + assert retrieved_docs[0] and retrieved_docs[0].page_content == "doc1" + assert retrieved_docs[1] and retrieved_docs[1].page_content == "doc2" + + +def test_yield_keys(mongo_store: MongoDBStore) -> None: + mongo_store.mset( + [ + ("key1", Document(page_content="doc1")), + ("key2", Document(page_content="doc2")), + ("another_key", Document(page_content="other")), + ] + ) + + # Test without prefix + keys = list(mongo_store.yield_keys()) + assert set(keys) == {"key1", "key2", "another_key"} + + # Test with prefix + keys_with_prefix = list(mongo_store.yield_keys(prefix="key")) + assert set(keys_with_prefix) == {"key1", "key2"} + + +def test_mdelete(mongo_store: MongoDBStore) -> None: + mongo_store.mset( + [ + ("key1", Document(page_content="doc1")), + ("key2", Document(page_content="doc2")), + ] + ) + # Delete single document + mongo_store.mdelete(["key1"]) + remaining_docs = list(mongo_store.yield_keys()) + assert "key1" not in remaining_docs + assert "key2" in remaining_docs + + # Delete multiple documents + mongo_store.mdelete(["key2"]) + remaining_docs = list(mongo_store.yield_keys()) + assert len(remaining_docs) == 0 + + +def test_init_errors() -> None: + with pytest.raises(ValueError): + MongoDBStore("", "", "") diff --git a/libs/community/tests/integration_tests/utilities/test_github.py b/libs/community/tests/integration_tests/utilities/test_github.py index 6e21a8a9c1469..77b87ec64f786 100644 --- a/libs/community/tests/integration_tests/utilities/test_github.py +++ b/libs/community/tests/integration_tests/utilities/test_github.py @@ -19,3 +19,9 @@ def test_get_open_issues(api_client: GitHubAPIWrapper) -> None: """Basic test to fetch issues""" issues = api_client.get_issues() assert len(issues) != 0 + + +def test_search_issues_and_prs(api_client: GitHubAPIWrapper) -> None: + """Basic test to search issues and PRs""" + results = api_client.search_issues_and_prs("is:pr is:merged") + assert len(results) != 0 diff --git a/libs/community/tests/integration_tests/vectorstores/test_lancedb.py 
b/libs/community/tests/integration_tests/vectorstores/test_lancedb.py index 37098e221141d..bde46e800e116 100644 --- a/libs/community/tests/integration_tests/vectorstores/test_lancedb.py +++ b/libs/community/tests/integration_tests/vectorstores/test_lancedb.py @@ -1,8 +1,11 @@ +import pytest + from langchain_community.vectorstores import LanceDB from tests.integration_tests.vectorstores.fake_embeddings import FakeEmbeddings -def test_lancedb() -> None: +@pytest.mark.requires("lancedb") +def test_lancedb_with_connection() -> None: import lancedb embeddings = FakeEmbeddings() @@ -23,22 +26,23 @@ def test_lancedb() -> None: assert "text 1" in result_texts -def test_lancedb_add_texts() -> None: - import lancedb +@pytest.mark.requires("lancedb") +def test_lancedb_without_connection() -> None: + embeddings = FakeEmbeddings() + texts = ["text 1", "text 2", "item 3"] + + store = LanceDB(embedding=embeddings) + store.add_texts(texts) + result = store.similarity_search("text 1") + result_texts = [doc.page_content for doc in result] + assert "text 1" in result_texts + +@pytest.mark.requires("lancedb") +def test_lancedb_add_texts() -> None: embeddings = FakeEmbeddings() - db = lancedb.connect("/tmp/lancedb") - texts = ["text 1"] - vectors = embeddings.embed_documents(texts) - table = db.create_table( - "my_table", - data=[ - {"vector": vectors[idx], "id": text, "text": text} - for idx, text in enumerate(texts) - ], - mode="overwrite", - ) - store = LanceDB(table, embeddings) + + store = LanceDB(embedding=embeddings) store.add_texts(["text 2"]) result = store.similarity_search("text 2") result_texts = [doc.page_content for doc in result] diff --git a/libs/community/tests/integration_tests/vectorstores/test_rocksetdb.py b/libs/community/tests/integration_tests/vectorstores/test_rocksetdb.py index 25f56d522464b..56950ce8c4623 100644 --- a/libs/community/tests/integration_tests/vectorstores/test_rocksetdb.py +++ b/libs/community/tests/integration_tests/vectorstores/test_rocksetdb.py @@ -1,5 +1,6 @@ import logging import os +import uuid from langchain_core.documents import Document @@ -31,10 +32,10 @@ # # See https://rockset.com/blog/introducing-vector-search-on-rockset/ for more details. -workspace = "langchain_tests" -collection_name = "langchain_demo" -text_key = "description" -embedding_key = "description_embedding" +WORKSPACE = "morgana" +COLLECTION_NAME = "langchain_demo" +TEXT_KEY = "description" +EMBEDDING_KEY = "description_embedding" class TestRockset: @@ -59,7 +60,7 @@ def setup_class(cls) -> None: elif region == "dev": host = rockset.DevRegions.usw2a1 else: - logger.warn( + logger.warning( "Using ROCKSET_REGION:%s as it is.. 
\ You should know what you're doing...", region, @@ -71,9 +72,9 @@ def setup_class(cls) -> None: if os.environ.get("ROCKSET_DELETE_DOCS_ON_START") == "1": logger.info( "Deleting all existing documents from the Rockset collection %s", - collection_name, + COLLECTION_NAME, ) - query = f"select _id from {workspace}.{collection_name}" + query = f"select _id from {WORKSPACE}.{COLLECTION_NAME}" query_response = client.Queries.query(sql={"query": query}) ids = [ @@ -84,15 +85,15 @@ def setup_class(cls) -> None: ] logger.info("Existing ids in collection: %s", ids) client.Documents.delete_documents( - collection=collection_name, + collection=COLLECTION_NAME, data=[rockset.models.DeleteDocumentsRequestData(id=i) for i in ids], - workspace=workspace, + workspace=WORKSPACE, ) embeddings = ConsistentFakeEmbeddings() embeddings.embed_documents(fake_texts) cls.rockset_vectorstore = Rockset( - client, embeddings, collection_name, text_key, embedding_key, workspace + client, embeddings, COLLECTION_NAME, TEXT_KEY, EMBEDDING_KEY, WORKSPACE ) def test_rockset_insert_and_search(self) -> None: @@ -120,6 +121,36 @@ def test_rockset_insert_and_search(self) -> None: ) assert output == [Document(page_content="bar", metadata={"metadata_index": 1})] + def test_add_documents_and_delete(self) -> None: + """ "add_documents" and "delete" are requirements to support use + with RecordManager""" + + texts = ["foo", "bar", "baz"] + metadatas = [{"metadata_index": i} for i in range(len(texts))] + + _docs = zip(texts, metadatas) + docs = [Document(page_content=pc, metadata=i) for pc, i in _docs] + + ids = self.rockset_vectorstore.add_documents(docs) + assert len(ids) == len(texts) + + deleted = self.rockset_vectorstore.delete(ids) + assert deleted + + def test_add_texts_does_not_modify_metadata(self) -> None: + """If metadata changes it will inhibit the langchain RecordManager + functionality""" + + texts = ["kitty", "doggy"] + metadatas = [{"source": "kitty.txt"}, {"source": "doggy.txt"}] + ids = [str(uuid.uuid4()), str(uuid.uuid4())] + + self.rockset_vectorstore.add_texts(texts=texts, metadatas=metadatas, ids=ids) + + for metadata in metadatas: + assert len(metadata) == 1 + assert list(metadata.keys())[0] == "source" + def test_build_query_sql(self) -> None: vector = [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0] q_str = self.rockset_vectorstore._build_query_sql( @@ -129,9 +160,9 @@ def test_build_query_sql(self) -> None: ) vector_str = ",".join(map(str, vector)) expected = f"""\ -SELECT * EXCEPT({embedding_key}), \ -COSINE_SIM({embedding_key}, [{vector_str}]) as dist -FROM {workspace}.{collection_name} +SELECT * EXCEPT({EMBEDDING_KEY}), \ +COSINE_SIM({EMBEDDING_KEY}, [{vector_str}]) as dist +FROM {WORKSPACE}.{COLLECTION_NAME} ORDER BY dist DESC LIMIT 4 """ @@ -147,9 +178,9 @@ def test_build_query_sql_with_where(self) -> None: ) vector_str = ",".join(map(str, vector)) expected = f"""\ -SELECT * EXCEPT({embedding_key}), \ -COSINE_SIM({embedding_key}, [{vector_str}]) as dist -FROM {workspace}.{collection_name} +SELECT * EXCEPT({EMBEDDING_KEY}), \ +COSINE_SIM({EMBEDDING_KEY}, [{vector_str}]) as dist +FROM {WORKSPACE}.{COLLECTION_NAME} WHERE age >= 10 ORDER BY dist DESC LIMIT 4 diff --git a/libs/community/tests/integration_tests/vectorstores/test_singlestoredb.py b/libs/community/tests/integration_tests/vectorstores/test_singlestoredb.py index 4bd23acbe7741..4f690f079fdd3 100644 --- a/libs/community/tests/integration_tests/vectorstores/test_singlestoredb.py +++ 
b/libs/community/tests/integration_tests/vectorstores/test_singlestoredb.py @@ -4,6 +4,7 @@ import numpy as np import pytest from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings from langchain_community.vectorstores.singlestoredb import SingleStoreDB from langchain_community.vectorstores.utils import DistanceStrategy @@ -43,6 +44,16 @@ def embed_query(self, text: str) -> List[float]: return self.normalize(super().embed_query(text)) +class RandomEmbeddings(Embeddings): + """Fake embeddings with random vectors. For testing purposes.""" + + def embed_documents(self, texts: List[str]) -> List[List[float]]: + return [np.random.rand(100).tolist() for _ in texts] + + def embed_query(self, text: str) -> List[float]: + return np.random.rand(100).tolist() + + @pytest.fixture def texts() -> List[str]: return ["foo", "bar", "baz"] @@ -99,6 +110,66 @@ def test_singlestoredb_euclidean_distance(texts: List[str]) -> None: drop(table_name) + +@pytest.mark.skipif(not singlestoredb_installed, reason="singlestoredb not installed") +def test_singlestoredb_vector_index_1(texts: List[str]) -> None: + """Test adding a new document""" + table_name = "test_singlestoredb_vector_index_1" + drop(table_name) + docsearch = SingleStoreDB.from_texts( + texts, + FakeEmbeddings(), + distance_strategy=DistanceStrategy.EUCLIDEAN_DISTANCE, + table_name=table_name, + use_vector_index=True, + vector_size=10, + host=TEST_SINGLESTOREDB_URL, + ) + docsearch.add_texts(["foo"]) + output = docsearch.similarity_search("foo", k=2) + assert output == TEST_RESULT + drop(table_name) + + +@pytest.mark.skipif(not singlestoredb_installed, reason="singlestoredb not installed") +def test_singlestoredb_vector_index_2(texts: List[str]) -> None: + """Test adding a new document""" + table_name = "test_singlestoredb_vector_index_2" + drop(table_name) + docsearch = SingleStoreDB.from_texts( + texts, + FakeEmbeddings(), + table_name=table_name, + use_vector_index=True, + vector_index_options={"index_type": "IVF_PQ", "nlist": 256}, + vector_size=10, + host=TEST_SINGLESTOREDB_URL, + ) + docsearch.add_texts(["foo"]) + output = docsearch.similarity_search("foo", k=1) + assert output[0].page_content == "foo" + drop(table_name) + + +@pytest.mark.skipif(not singlestoredb_installed, reason="singlestoredb not installed") +def test_singlestoredb_vector_index_large() -> None: + """Test adding a new document""" + table_name = "test_singlestoredb_vector_index_large" + drop(table_name) + docsearch = SingleStoreDB.from_texts( + ["foo"] * 300000, + RandomEmbeddings(), + distance_strategy=DistanceStrategy.EUCLIDEAN_DISTANCE, + table_name=table_name, + use_vector_index=True, + vector_size=100, + vector_index_name="vector_index_large", + host=TEST_SINGLESTOREDB_URL, + ) + output = docsearch.similarity_search("foo", k=1) + assert output[0].page_content == "foo" + drop(table_name) + + @pytest.mark.skipif(not singlestoredb_installed, reason="singlestoredb not installed") def test_singlestoredb_from_existing(texts: List[str]) -> None: """Test adding a new document""" diff --git a/libs/community/tests/unit_tests/agent_toolkits/test_imports.py b/libs/community/tests/unit_tests/agent_toolkits/test_imports.py index 3a7ca10efdf26..c2dbdd3833399 100644 --- a/libs/community/tests/unit_tests/agent_toolkits/test_imports.py +++ b/libs/community/tests/unit_tests/agent_toolkits/test_imports.py @@ -28,6 +28,7 @@ "create_pbi_chat_agent", "create_spark_sql_agent", "create_sql_agent", + "CogniswitchToolkit", ] diff --git
a/libs/community/tests/unit_tests/chat_models/test_imports.py b/libs/community/tests/unit_tests/chat_models/test_imports.py index b160f2d659500..10dc7c8a226d1 100644 --- a/libs/community/tests/unit_tests/chat_models/test_imports.py +++ b/libs/community/tests/unit_tests/chat_models/test_imports.py @@ -38,6 +38,7 @@ "VolcEngineMaasChat", "LlamaEdgeChatService", "GPTRouter", + "ChatYuan2", "ChatZhipuAI", "ChatPerplexity", ] diff --git a/libs/community/tests/unit_tests/chat_models/test_yuan2.py b/libs/community/tests/unit_tests/chat_models/test_yuan2.py new file mode 100644 index 0000000000000..c5253375d310a --- /dev/null +++ b/libs/community/tests/unit_tests/chat_models/test_yuan2.py @@ -0,0 +1,64 @@ +"""Test ChatYuan2 wrapper.""" + +import pytest +from langchain_core.messages import ( + AIMessage, + HumanMessage, + SystemMessage, +) + +from langchain_community.chat_models.yuan2 import ( + ChatYuan2, + _convert_dict_to_message, + _convert_message_to_dict, +) + + +@pytest.mark.requires("openai") +def test_yuan2_model_param() -> None: + chat = ChatYuan2(model="foo") + assert chat.model_name == "foo" + chat = ChatYuan2(model_name="foo") + assert chat.model_name == "foo" + + +def test__convert_message_to_dict_human() -> None: + message = HumanMessage(content="foo") + result = _convert_message_to_dict(message) + expected_output = {"role": "user", "content": "foo"} + assert result == expected_output + + +def test__convert_message_to_dict_ai() -> None: + message = AIMessage(content="foo") + result = _convert_message_to_dict(message) + expected_output = {"role": "assistant", "content": "foo"} + assert result == expected_output + + +def test__convert_message_to_dict_system() -> None: + message = SystemMessage(content="foo") + result = _convert_message_to_dict(message) + expected_output = {"role": "system", "content": "foo"} + assert result == expected_output + + +def test__convert_dict_to_message_human() -> None: + message = {"role": "user", "content": "hello"} + result = _convert_dict_to_message(message) + expected_output = HumanMessage(content="hello") + assert result == expected_output + + +def test__convert_dict_to_message_ai() -> None: + message = {"role": "assistant", "content": "hello"} + result = _convert_dict_to_message(message) + expected_output = AIMessage(content="hello") + assert result == expected_output + + +def test__convert_dict_to_message_system() -> None: + message = {"role": "system", "content": "hello"} + result = _convert_dict_to_message(message) + expected_output = SystemMessage(content="hello") + assert result == expected_output diff --git a/libs/community/tests/unit_tests/document_loaders/parsers/language/test_c.py b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_c.py new file mode 100644 index 0000000000000..aa6cb58688941 --- /dev/null +++ b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_c.py @@ -0,0 +1,53 @@ +import unittest + +import pytest + +from langchain_community.document_loaders.parsers.language.c import CSegmenter + + +@pytest.mark.requires("tree_sitter", "tree_sitter_languages") +class TestCSegmenter(unittest.TestCase): + def setUp(self) -> None: + self.example_code = """int main() { + return 0; +} + +struct S { +}; + +union U { +}; + +enum Evens { + Two = 2, + Four = 4 +};""" + + self.expected_simplified_code = """// Code for: int main() { + +// Code for: struct S { + +// Code for: union U { + +// Code for: enum Evens {""" + + self.expected_extracted_code = [ + "int main() {\n return 0;\n}", + "struct S {\n}", + 
"union U {\n}", + "enum Evens {\n Two = 2,\n Four = 4\n}", + ] + + def test_is_valid(self) -> None: + self.assertTrue(CSegmenter("int a;").is_valid()) + self.assertFalse(CSegmenter("a b c 1 2 3").is_valid()) + + def test_extract_functions_classes(self) -> None: + segmenter = CSegmenter(self.example_code) + extracted_code = segmenter.extract_functions_classes() + self.assertEqual(extracted_code, self.expected_extracted_code) + + def test_simplify_code(self) -> None: + segmenter = CSegmenter(self.example_code) + simplified_code = segmenter.simplify_code() + self.assertEqual(simplified_code, self.expected_simplified_code) diff --git a/libs/community/tests/unit_tests/document_loaders/parsers/language/test_cpp.py b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_cpp.py new file mode 100644 index 0000000000000..a06b7fa7aff9d --- /dev/null +++ b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_cpp.py @@ -0,0 +1,63 @@ +import unittest + +import pytest + +from langchain_community.document_loaders.parsers.language.cpp import CPPSegmenter + + +@pytest.mark.requires("tree_sitter", "tree_sitter_languages") +class TestCPPSegmenter(unittest.TestCase): + def setUp(self) -> None: + self.example_code = """int foo() { + return 1; +} + +class T { + auto bar() const -> int; + template + void baz(U) { + } +}; + +struct S { +}; + +union U { +}; + +auto T::bar() const -> int { + return 1; +}""" + + self.expected_simplified_code = """// Code for: int foo() { + +// Code for: class T { + +// Code for: struct S { + +// Code for: union U { + +// Code for: auto T::bar() const -> int {""" + + self.expected_extracted_code = [ + "int foo() {\n return 1;\n}", + "class T {\n auto bar() const -> int;\n " + "template\n void baz(U) {\n }\n}", + "struct S {\n}", + "union U {\n}", + "auto T::bar() const -> int {\n return 1;\n}", + ] + + def test_is_valid(self) -> None: + self.assertTrue(CPPSegmenter("int a;").is_valid()) + self.assertFalse(CPPSegmenter("a b c 1 2 3").is_valid()) + + def test_extract_functions_classes(self) -> None: + segmenter = CPPSegmenter(self.example_code) + extracted_code = segmenter.extract_functions_classes() + self.assertEqual(extracted_code, self.expected_extracted_code) + + def test_simplify_code(self) -> None: + segmenter = CPPSegmenter(self.example_code) + simplified_code = segmenter.simplify_code() + self.assertEqual(simplified_code, self.expected_simplified_code) diff --git a/libs/community/tests/unit_tests/document_loaders/parsers/language/test_csharp.py b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_csharp.py new file mode 100644 index 0000000000000..3f04713f4ce45 --- /dev/null +++ b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_csharp.py @@ -0,0 +1,78 @@ +import unittest + +import pytest + +from langchain_community.document_loaders.parsers.language.csharp import CSharpSegmenter + + +@pytest.mark.requires("tree_sitter", "tree_sitter_languages") +class TestCSharpSegmenter(unittest.TestCase): + def setUp(self) -> None: + self.example_code = """namespace World +{ +} + +class Hello +{ + static void Main(string []args) + { + System.Console.WriteLine("Hello, world."); + } +} + +interface Human +{ + void breathe(); +} + +enum Tens +{ + Ten = 10, + Twenty = 20 +} + +struct T +{ +} + +record Person(string FirstName, string LastName, string Id) +{ + internal string Id { get; init; } = Id; +}""" + + self.expected_simplified_code = """// Code for: namespace World + +// Code for: class Hello + +// Code for: 
interface Human + +// Code for: enum Tens + +// Code for: struct T + +// Code for: record Person(string FirstName, string LastName, string Id)""" + + self.expected_extracted_code = [ + "namespace World\n{\n}", + "class Hello\n{\n static void Main(string []args)\n {\n " + 'System.Console.WriteLine("Hello, world.");\n }\n}', + "interface Human\n{\n void breathe();\n}", + "enum Tens\n{\n Ten = 10,\n Twenty = 20\n}", + "struct T\n{\n}", + "record Person(string FirstName, string LastName, string Id)\n{\n " + "internal string Id { get; init; } = Id;\n}", + ] + + def test_is_valid(self) -> None: + self.assertTrue(CSharpSegmenter("int a;").is_valid()) + self.assertFalse(CSharpSegmenter("a b c 1 2 3").is_valid()) + + def test_extract_functions_classes(self) -> None: + segmenter = CSharpSegmenter(self.example_code) + extracted_code = segmenter.extract_functions_classes() + self.assertEqual(extracted_code, self.expected_extracted_code) + + def test_simplify_code(self) -> None: + segmenter = CSharpSegmenter(self.example_code) + simplified_code = segmenter.simplify_code() + self.assertEqual(simplified_code, self.expected_simplified_code) diff --git a/libs/community/tests/unit_tests/document_loaders/parsers/language/test_go.py b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_go.py new file mode 100644 index 0000000000000..e1360e35d9793 --- /dev/null +++ b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_go.py @@ -0,0 +1,50 @@ +import unittest + +import pytest + +from langchain_community.document_loaders.parsers.language.go import GoSegmenter + + +@pytest.mark.requires("tree_sitter", "tree_sitter_languages") +class TestGoSegmenter(unittest.TestCase): + def setUp(self) -> None: + self.example_code = """func foo(a int) int { + return a; +} + +type T struct { + a int + b bool + c string +} + +type S interface { + bar() float64 +} +""" + + self.expected_simplified_code = """// Code for: func foo(a int) int { + +// Code for: type T struct { + +// Code for: type S interface {""" + + self.expected_extracted_code = [ + "func foo(a int) int {\n return a;\n}", + "type T struct {\n a int\n b bool\n c string\n}", + "type S interface {\n bar() float64\n}", + ] + + def test_is_valid(self) -> None: + self.assertTrue(GoSegmenter("var a int;").is_valid()) + self.assertFalse(GoSegmenter("a b c 1 2 3").is_valid()) + + def test_extract_functions_classes(self) -> None: + segmenter = GoSegmenter(self.example_code) + extracted_code = segmenter.extract_functions_classes() + self.assertEqual(extracted_code, self.expected_extracted_code) + + def test_simplify_code(self) -> None: + segmenter = GoSegmenter(self.example_code) + simplified_code = segmenter.simplify_code() + self.assertEqual(simplified_code, self.expected_simplified_code) diff --git a/libs/community/tests/unit_tests/document_loaders/parsers/language/test_java.py b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_java.py new file mode 100644 index 0000000000000..1129ae1a889fe --- /dev/null +++ b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_java.py @@ -0,0 +1,57 @@ +import unittest + +import pytest + +from langchain_community.document_loaders.parsers.language.java import JavaSegmenter + + +@pytest.mark.requires("tree_sitter", "tree_sitter_languages") +class TestJavaSegmenter(unittest.TestCase): + def setUp(self) -> None: + self.example_code = """class Hello +{ + public static void main(String[] args) + { + System.out.println("Hello, world."); + } +} + +interface Human +{ + void 
breathe(); +} + +enum Tens +{ + TEN, + TWENTY +} +""" + + self.expected_simplified_code = """// Code for: class Hello + +// Code for: interface Human + +// Code for: enum Tens""" + + self.expected_extracted_code = [ + "class Hello\n{\n " + "public static void main(String[] args)\n {\n " + 'System.out.println("Hello, world.");\n }\n}', + "interface Human\n{\n void breathe();\n}", + "enum Tens\n{\n TEN,\n TWENTY\n}", + ] + + def test_is_valid(self) -> None: + self.assertTrue(JavaSegmenter("int a;").is_valid()) + self.assertFalse(JavaSegmenter("a b c 1 2 3").is_valid()) + + def test_extract_functions_classes(self) -> None: + segmenter = JavaSegmenter(self.example_code) + extracted_code = segmenter.extract_functions_classes() + self.assertEqual(extracted_code, self.expected_extracted_code) + + def test_simplify_code(self) -> None: + segmenter = JavaSegmenter(self.example_code) + simplified_code = segmenter.simplify_code() + self.assertEqual(simplified_code, self.expected_simplified_code) diff --git a/libs/community/tests/unit_tests/document_loaders/parsers/language/test_kotlin.py b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_kotlin.py new file mode 100644 index 0000000000000..35bde9feb26e7 --- /dev/null +++ b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_kotlin.py @@ -0,0 +1,60 @@ +import unittest + +import pytest + +from langchain_community.document_loaders.parsers.language.kotlin import KotlinSegmenter + + +@pytest.mark.requires("tree_sitter", "tree_sitter_languages") +class TestKotlinSegmenter(unittest.TestCase): + def setUp(self) -> None: + self.example_code = """fun foo(a: Int): Int { + return a +} + +class T { + var a: Int = 0 + var b: Boolean = false + var c: String = "" +} + +interface S { + fun bar(): Double +} + +enum class P { + A, + B, + C +} +""" + + self.expected_simplified_code = """// Code for: fun foo(a: Int): Int { + +// Code for: class T { + +// Code for: interface S { + +// Code for: enum class P {""" + + self.expected_extracted_code = [ + "fun foo(a: Int): Int {\n return a\n}", + "class T {\n var a: Int = 0\n var b: Boolean = false\n " + 'var c: String = ""\n}', + "interface S {\n fun bar(): Double\n}", + "enum class P {\n A,\n B,\n C\n}", + ] + + def test_is_valid(self) -> None: + self.assertTrue(KotlinSegmenter("val a: Int = 5").is_valid()) + self.assertFalse(KotlinSegmenter("a b c 1 2 3").is_valid()) + + def test_extract_functions_classes(self) -> None: + segmenter = KotlinSegmenter(self.example_code) + extracted_code = segmenter.extract_functions_classes() + self.assertEqual(extracted_code, self.expected_extracted_code) + + def test_simplify_code(self) -> None: + segmenter = KotlinSegmenter(self.example_code) + simplified_code = segmenter.simplify_code() + self.assertEqual(simplified_code, self.expected_simplified_code) diff --git a/libs/community/tests/unit_tests/document_loaders/parsers/language/test_lua.py b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_lua.py new file mode 100644 index 0000000000000..afb50b8345bfe --- /dev/null +++ b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_lua.py @@ -0,0 +1,55 @@ +import unittest + +import pytest + +from langchain_community.document_loaders.parsers.language.lua import LuaSegmenter + + +@pytest.mark.requires("tree_sitter", "tree_sitter_languages") +class TestLuaSegmenter(unittest.TestCase): + def setUp(self) -> None: + self.example_code = """function F() + print("Hello") +end + +local function G() + print("Goodbye") +end""" + 
+ self.expected_simplified_code = """-- Code for: function F() + +-- Code for: local function G()""" + + self.expected_extracted_code = [ + 'function F()\n print("Hello")\nend', + 'local function G()\n print("Goodbye")\nend', + ] + + def test_is_valid(self) -> None: + self.assertTrue(LuaSegmenter("local a").is_valid()) + self.assertFalse(LuaSegmenter("a b c 1 2 3").is_valid()) + + # TODO: Investigate flakey-ness. + @pytest.mark.skip( + reason=( + "Flakey. To be investigated. See " + "https://github.com/langchain-ai/langchain/actions/runs/7907779756/job/21585580650." # noqa: E501 + ) + ) + def test_extract_functions_classes(self) -> None: + segmenter = LuaSegmenter(self.example_code) + extracted_code = segmenter.extract_functions_classes() + self.assertEqual(extracted_code, self.expected_extracted_code) + + # TODO: Investigate flakey-ness. + @pytest.mark.skip( + reason=( + "Flakey. To be investigated. See " + "https://github.com/langchain-ai/langchain/actions/runs/7923203031/job/21632416298?pr=17599 " # noqa: E501 + "and https://github.com/langchain-ai/langchain/actions/runs/7923784089/job/2163420864." # noqa: E501 + ) + ) + def test_simplify_code(self) -> None: + segmenter = LuaSegmenter(self.example_code) + simplified_code = segmenter.simplify_code() + self.assertEqual(simplified_code, self.expected_simplified_code) diff --git a/libs/community/tests/unit_tests/document_loaders/parsers/language/test_perl.py b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_perl.py new file mode 100644 index 0000000000000..78e3fa25a0003 --- /dev/null +++ b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_perl.py @@ -0,0 +1,44 @@ +import unittest + +import pytest + +from langchain_community.document_loaders.parsers.language.perl import PerlSegmenter + + +@pytest.mark.requires("tree_sitter", "tree_sitter_languages") +class TestPerlSegmenter(unittest.TestCase): + def setUp(self) -> None: + self.example_code = """sub Hello { + print "Hello, World!"; +} + +sub new { + my $class = shift; + my $self = {}; + bless $self, $class; + return $self; +}""" + + self.expected_simplified_code = """# Code for: sub Hello { + +# Code for: sub new {""" + + self.expected_extracted_code = [ + 'sub Hello {\n print "Hello, World!";\n}', + "sub new {\n my $class = shift;\n my $self = {};\n " + "bless $self, $class;\n return $self;\n}", + ] + + def test_is_valid(self) -> None: + self.assertTrue(PerlSegmenter("$age = 25;").is_valid()) + self.assertFalse(PerlSegmenter("a b c 1 2 3").is_valid()) + + def test_extract_functions_classes(self) -> None: + segmenter = PerlSegmenter(self.example_code) + extracted_code = segmenter.extract_functions_classes() + self.assertEqual(extracted_code, self.expected_extracted_code) + + def test_simplify_code(self) -> None: + segmenter = PerlSegmenter(self.example_code) + simplified_code = segmenter.simplify_code() + self.assertEqual(simplified_code, self.expected_simplified_code) diff --git a/libs/community/tests/unit_tests/document_loaders/parsers/language/test_ruby.py b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_ruby.py new file mode 100644 index 0000000000000..99dd7db1fa813 --- /dev/null +++ b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_ruby.py @@ -0,0 +1,51 @@ +import unittest + +import pytest + +from langchain_community.document_loaders.parsers.language.ruby import RubySegmenter + + +@pytest.mark.requires("tree_sitter", "tree_sitter_languages") +class TestRubySegmenter(unittest.TestCase): + def 
setUp(self) -> None: + self.example_code = """def foo + i = 0 +end + +module M + def hi + i = 2 + end +end + +class T + def bar + j = 1 + end +end""" + + self.expected_simplified_code = """# Code for: def foo + +# Code for: module M + +# Code for: class T""" + + self.expected_extracted_code = [ + "def foo\n i = 0\nend", + "module M\n def hi\n i = 2\n end\nend", + "class T\n def bar\n j = 1\n end\nend", + ] + + def test_is_valid(self) -> None: + self.assertTrue(RubySegmenter("def a; end").is_valid()) + self.assertFalse(RubySegmenter("a b c 1 2 3").is_valid()) + + def test_extract_functions_classes(self) -> None: + segmenter = RubySegmenter(self.example_code) + extracted_code = segmenter.extract_functions_classes() + self.assertEqual(extracted_code, self.expected_extracted_code) + + def test_simplify_code(self) -> None: + segmenter = RubySegmenter(self.example_code) + simplified_code = segmenter.simplify_code() + self.assertEqual(simplified_code, self.expected_simplified_code) diff --git a/libs/community/tests/unit_tests/document_loaders/parsers/language/test_rust.py b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_rust.py new file mode 100644 index 0000000000000..6b35677c30505 --- /dev/null +++ b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_rust.py @@ -0,0 +1,50 @@ +import unittest + +import pytest + +from langchain_community.document_loaders.parsers.language.rust import RustSegmenter + + +@pytest.mark.requires("tree_sitter", "tree_sitter_languages") +class TestRustSegmenter(unittest.TestCase): + def setUp(self) -> None: + self.example_code = """fn foo() -> i32 { + return 1; +} + +struct T { + a: i32, + b: bool, + c: String +} + +trait S { + fn bar() -> Self +} +""" + + self.expected_simplified_code = """// Code for: fn foo() -> i32 { + +// Code for: struct T { + +// Code for: trait S {""" + + self.expected_extracted_code = [ + "fn foo() -> i32 {\n return 1;\n}", + "struct T {\n a: i32,\n b: bool,\n c: String\n}", + "trait S {\n fn bar() -> Self\n}", + ] + + def test_is_valid(self) -> None: + self.assertTrue(RustSegmenter("let a: i32;").is_valid()) + self.assertFalse(RustSegmenter("a b c 1 2 3").is_valid()) + + def test_extract_functions_classes(self) -> None: + segmenter = RustSegmenter(self.example_code) + extracted_code = segmenter.extract_functions_classes() + self.assertEqual(extracted_code, self.expected_extracted_code) + + def test_simplify_code(self) -> None: + segmenter = RustSegmenter(self.example_code) + simplified_code = segmenter.simplify_code() + self.assertEqual(simplified_code, self.expected_simplified_code) diff --git a/libs/community/tests/unit_tests/document_loaders/parsers/language/test_scala.py b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_scala.py new file mode 100644 index 0000000000000..3fad1aeb806be --- /dev/null +++ b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_scala.py @@ -0,0 +1,56 @@ +import unittest + +import pytest + +from langchain_community.document_loaders.parsers.language.scala import ScalaSegmenter + + +@pytest.mark.requires("tree_sitter", "tree_sitter_languages") +class TestScalaSegmenter(unittest.TestCase): + def setUp(self) -> None: + self.example_code = """def foo() { + return 1 +} + +object T { + def baz() { + val x = 1 + } +} + +class S() { + +} + +trait T { + def P(x: Any): Boolean +}""" + + self.expected_simplified_code = """// Code for: def foo() { + +// Code for: object T { + +// Code for: class S() { + +// Code for: trait T {""" + + 
self.expected_extracted_code = [ + "def foo() {\n return 1\n}", + "object T {\n def baz() {\n val x = 1\n }\n}", + "class S() {\n\n}", + "trait T {\n def P(x: Any): Boolean\n}", + ] + + def test_is_valid(self) -> None: + self.assertFalse(ScalaSegmenter("val x").is_valid()) + self.assertFalse(ScalaSegmenter("a b c 1 2 3").is_valid()) + + def test_extract_functions_classes(self) -> None: + segmenter = ScalaSegmenter(self.example_code) + extracted_code = segmenter.extract_functions_classes() + self.assertEqual(extracted_code, self.expected_extracted_code) + + def test_simplify_code(self) -> None: + segmenter = ScalaSegmenter(self.example_code) + simplified_code = segmenter.simplify_code() + self.assertEqual(simplified_code, self.expected_simplified_code) diff --git a/libs/community/tests/unit_tests/document_loaders/parsers/language/test_typescript.py b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_typescript.py new file mode 100644 index 0000000000000..caf4b6ad66ccf --- /dev/null +++ b/libs/community/tests/unit_tests/document_loaders/parsers/language/test_typescript.py @@ -0,0 +1,67 @@ +import unittest + +import pytest + +from langchain_community.document_loaders.parsers.language.typescript import ( + TypeScriptSegmenter, +) + + +@pytest.mark.requires("tree_sitter", "tree_sitter_languages") +class TestTypeScriptSegmenter(unittest.TestCase): + def setUp(self) -> None: + self.example_code = """function foo(): number +{ + return 1; +} + +class Autumn +{ + leafCount = 45; + reduceTemperature(desiredTemperature: number): number { + return desiredTemperature * 0.6; + } +} + +interface Season +{ + change(): void; +} + +enum Colors +{ + Green = 'green', + Red = 'red', +} +""" + + self.expected_simplified_code = """// Code for: function foo(): number + +// Code for: class Autumn + +// Code for: interface Season + +// Code for: enum Colors""" + + self.expected_extracted_code = [ + "function foo(): number\n{\n return 1;\n}", + "class Autumn\n{\n leafCount = 45;\n " + "reduceTemperature(desiredTemperature: number): number {\n " + "return desiredTemperature * 0.6;\n }\n}", + "interface Season\n{\n change(): void;\n}", + "enum Colors\n{\n Green = 'green',\n Red = 'red',\n}", + ] + + def test_is_valid(self) -> None: + self.assertTrue(TypeScriptSegmenter("let a;").is_valid()) + self.assertFalse(TypeScriptSegmenter("a b c 1 2 3").is_valid()) + + def test_extract_functions_classes(self) -> None: + segmenter = TypeScriptSegmenter(self.example_code) + extracted_code = segmenter.extract_functions_classes() + self.assertEqual(extracted_code, self.expected_extracted_code) + + def test_simplify_code(self) -> None: + segmenter = TypeScriptSegmenter(self.example_code) + simplified_code = segmenter.simplify_code() + self.assertEqual(simplified_code, self.expected_simplified_code) diff --git a/libs/community/tests/unit_tests/document_loaders/test_directory.py b/libs/community/tests/unit_tests/document_loaders/test_directory.py index dc028e4814c70..f83e4bc2dfe1f 100644 --- a/libs/community/tests/unit_tests/document_loaders/test_directory.py +++ b/libs/community/tests/unit_tests/document_loaders/test_directory.py @@ -1,4 +1,8 @@ +from pathlib import Path +from typing import Any, List + import pytest +from langchain_core.documents import Document from langchain_community.document_loaders import DirectoryLoader @@ -17,3 +21,35 @@ def test_raise_error_if_path_is_not_directory() -> None: loader.load() assert str(e.value) == f"Expected directory, got file: '{__file__}'" + + +class CustomLoader: + 
"""Test loader. Mimics interface of existing file loader.""" + + def __init__(self, path: Path, **kwargs: Any) -> None: + """Initialize the loader.""" + self.path = path + + def load(self) -> List[Document]: + """Load documents.""" + with open(self.path, "r") as f: + return [Document(page_content=f.read())] + + +def test_exclude_ignores_matching_files(tmp_path: Path) -> None: + txt_file = tmp_path / "test.txt" + py_file = tmp_path / "test.py" + txt_file.touch() + py_file.touch() + loader = DirectoryLoader( + str(tmp_path), + exclude=["*.py"], + loader_cls=CustomLoader, # type: ignore + ) + data = loader.load() + assert len(data) == 1 + + +def test_exclude_as_string_converts_to_sequence() -> None: + loader = DirectoryLoader("./some_directory", exclude="*.py") + assert loader.exclude == ("*.py",) diff --git a/libs/community/tests/unit_tests/document_loaders/test_imports.py b/libs/community/tests/unit_tests/document_loaders/test_imports.py index d22f81aa19ba3..e4b406add5773 100644 --- a/libs/community/tests/unit_tests/document_loaders/test_imports.py +++ b/libs/community/tests/unit_tests/document_loaders/test_imports.py @@ -23,6 +23,7 @@ "AssemblyAIAudioTranscriptLoader", "AstraDBLoader", "AsyncHtmlLoader", + "AthenaLoader", "AzureAIDataLoader", "AzureAIDocumentIntelligenceLoader", "AzureBlobStorageContainerLoader", @@ -48,6 +49,7 @@ "CubeSemanticLoader", "DataFrameLoader", "DatadogLogsLoader", + "PebbloSafeLoader", "DiffbotLoader", "DirectoryLoader", "DiscordChatLoader", diff --git a/libs/community/tests/unit_tests/document_loaders/test_pebblo.py b/libs/community/tests/unit_tests/document_loaders/test_pebblo.py new file mode 100644 index 0000000000000..9ab487c8e7804 --- /dev/null +++ b/libs/community/tests/unit_tests/document_loaders/test_pebblo.py @@ -0,0 +1,114 @@ +import os +from pathlib import Path +from typing import Dict + +import pytest +from langchain_core.documents import Document +from pytest_mock import MockerFixture + +from langchain_community.document_loaders import CSVLoader, PyPDFLoader + +EXAMPLE_DOCS_DIRECTORY = str(Path(__file__).parent.parent.parent / "examples/") + + +class MockResponse: + def __init__(self, json_data: Dict, status_code: int): + self.json_data = json_data + self.status_code = status_code + + def json(self) -> Dict: + return self.json_data + + +def test_pebblo_import() -> None: + """Test that the Pebblo safe loader can be imported.""" + from langchain_community.document_loaders import PebbloSafeLoader # noqa: F401 + + +def test_empty_filebased_loader(mocker: MockerFixture) -> None: + """Test basic file based csv loader.""" + # Setup + from langchain_community.document_loaders import PebbloSafeLoader + + mocker.patch.multiple( + "requests", + get=MockResponse(json_data={"data": ""}, status_code=200), + post=MockResponse(json_data={"data": ""}, status_code=200), + ) + + file_path = os.path.join(EXAMPLE_DOCS_DIRECTORY, "test_empty.csv") + expected_docs: list = [] + + # Exercise + loader = PebbloSafeLoader( + CSVLoader(file_path=file_path), + "dummy_app_name", + "dummy_owner", + "dummy_description", + ) + result = loader.load() + + # Assert + assert result == expected_docs + + +def test_csv_loader_load_valid_data(mocker: MockerFixture) -> None: + # Setup + from langchain_community.document_loaders import PebbloSafeLoader + + mocker.patch.multiple( + "requests", + get=MockResponse(json_data={"data": ""}, status_code=200), + post=MockResponse(json_data={"data": ""}, status_code=200), + ) + file_path = os.path.join(EXAMPLE_DOCS_DIRECTORY, "test_nominal.csv") + 
expected_docs = [ + Document( + page_content="column1: value1\ncolumn2: value2\ncolumn3: value3", + metadata={"source": file_path, "row": 0}, + ), + Document( + page_content="column1: value4\ncolumn2: value5\ncolumn3: value6", + metadata={"source": file_path, "row": 1}, + ), + ] + + # Exercise + loader = PebbloSafeLoader( + CSVLoader(file_path=file_path), + "dummy_app_name", + "dummy_owner", + "dummy_description", + ) + result = loader.load() + + # Assert + assert result == expected_docs + + +@pytest.mark.requires("pypdf") +def test_pdf_lazy_load(mocker: MockerFixture) -> None: + # Setup + from langchain_community.document_loaders import PebbloSafeLoader + + mocker.patch.multiple( + "requests", + get=MockResponse(json_data={"data": ""}, status_code=200), + post=MockResponse(json_data={"data": ""}, status_code=200), + ) + file_path = os.path.join( + EXAMPLE_DOCS_DIRECTORY, "multi-page-forms-sample-2-page.pdf" + ) + + # Exercise + loader = PebbloSafeLoader( + PyPDFLoader(file_path=file_path), + "dummy_app_name", + "dummy_owner", + "dummy_description", + ) + + result = list(loader.lazy_load()) + + # Assert + assert len(result) == 2 diff --git a/libs/community/tests/unit_tests/embeddings/test_imports.py b/libs/community/tests/unit_tests/embeddings/test_imports.py index 2220d7446f90a..c66672c79ee0d 100644 --- a/libs/community/tests/unit_tests/embeddings/test_imports.py +++ b/libs/community/tests/unit_tests/embeddings/test_imports.py @@ -58,6 +58,8 @@ "BookendEmbeddings", "VolcanoEmbeddings", "OCIGenAIEmbeddings", + "QuantizedBiEncoderEmbeddings", + "NeMoEmbeddings", ] diff --git a/libs/community/tests/unit_tests/graphs/test_imports.py b/libs/community/tests/unit_tests/graphs/test_imports.py index 653d7d540ba5f..202ecefa24997 100644 --- a/libs/community/tests/unit_tests/graphs/test_imports.py +++ b/libs/community/tests/unit_tests/graphs/test_imports.py @@ -6,6 +6,7 @@ "Neo4jGraph", "NebulaGraph", "NeptuneGraph", + "NeptuneRdfGraph", "KuzuGraph", "HugeGraph", "RdfGraph", diff --git a/libs/community/tests/unit_tests/graphs/test_neptune_graph.py b/libs/community/tests/unit_tests/graphs/test_neptune_graph.py index e3d986f2eb04b..6e714a41665a7 100644 --- a/libs/community/tests/unit_tests/graphs/test_neptune_graph.py +++ b/libs/community/tests/unit_tests/graphs/test_neptune_graph.py @@ -1,2 +1,5 @@ def test_import() -> None: - from langchain_community.graphs import NeptuneGraph # noqa: F401 + from langchain_community.graphs import ( + NeptuneGraph, # noqa: F401 + NeptuneRdfGraph, # noqa: F401 + ) diff --git a/libs/community/tests/unit_tests/llms/test_imports.py b/libs/community/tests/unit_tests/llms/test_imports.py index 9c7abb11f83d9..6c8c6504e36a2 100644 --- a/libs/community/tests/unit_tests/llms/test_imports.py +++ b/libs/community/tests/unit_tests/llms/test_imports.py @@ -87,6 +87,7 @@ "JavelinAIGateway", "QianfanLLMEndpoint", "YandexGPT", + "Yuan2", "VolcEngineMaasLLM", "WatsonxLLM", ] diff --git a/libs/community/tests/unit_tests/llms/test_llamafile.py b/libs/community/tests/unit_tests/llms/test_llamafile.py new file mode 100644 index 0000000000000..10fea66a5ac73 --- /dev/null +++ b/libs/community/tests/unit_tests/llms/test_llamafile.py @@ -0,0 +1,158 @@ +import json +from collections import deque +from typing import Any, Dict + +import pytest +import requests +from pytest import MonkeyPatch + +from langchain_community.llms.llamafile import Llamafile + + +def default_generation_params() -> Dict[str, Any]: + return { + "temperature": 0.8, + "seed": -1, + "top_k": 40, + "top_p": 0.95, + "min_p": 
0.05, + "n_predict": -1, + "n_keep": 0, + "tfs_z": 1.0, + "typical_p": 1.0, + "repeat_penalty": 1.1, + "repeat_last_n": 64, + "penalize_nl": True, + "presence_penalty": 0.0, + "frequency_penalty": 0.0, + "mirostat": 0, + "mirostat_tau": 5.0, + "mirostat_eta": 0.1, + } + + +def mock_response() -> requests.Response: + contents = json.dumps({"content": "the quick brown fox"}) + response = requests.Response() + response.status_code = 200 + response._content = str.encode(contents) + return response + + +def mock_response_stream(): # type: ignore[no-untyped-def] + mock_response = deque( + [ + b'data: {"content":"the","multimodal":false,"slot_id":0,"stop":false}\n\n', # noqa + b'data: {"content":" quick","multimodal":false,"slot_id":0,"stop":false}\n\n', # noqa + ] + ) + + class MockRaw: + def read(self, chunk_size): # type: ignore[no-untyped-def] + try: + return mock_response.popleft() + except IndexError: + return None + + response = requests.Response() + response.status_code = 200 + response.raw = MockRaw() + return response + + +def test_call(monkeypatch: MonkeyPatch) -> None: + """ + Test basic functionality of the `invoke` method + """ + llm = Llamafile( + base_url="http://llamafile-host:8080", + ) + + def mock_post(url, headers, json, stream, timeout): # type: ignore[no-untyped-def] + assert url == "http://llamafile-host:8080/completion" + assert headers == { + "Content-Type": "application/json", + } + # 'unknown' kwarg should be ignored + assert json == {"prompt": "Test prompt", **default_generation_params()} + assert stream is False + assert timeout is None + return mock_response() + + monkeypatch.setattr(requests, "post", mock_post) + out = llm.invoke("Test prompt") + assert out == "the quick brown fox" + + +def test_call_with_kwargs(monkeypatch: MonkeyPatch) -> None: + """ + Test kwargs passed to `invoke` override the default values and are passed + to the endpoint correctly. Also test that any 'unknown' kwargs that are not + present in the LLM class attrs are ignored. + """ + llm = Llamafile( + base_url="http://llamafile-host:8080", + ) + + def mock_post(url, headers, json, stream, timeout): # type: ignore[no-untyped-def] + assert url == "http://llamafile-host:8080/completion" + assert headers == { + "Content-Type": "application/json", + } + # 'unknown' kwarg should be ignored + expected = {"prompt": "Test prompt", **default_generation_params()} + expected["seed"] = 0 + assert json == expected + assert stream is False + assert timeout is None + return mock_response() + + monkeypatch.setattr(requests, "post", mock_post) + out = llm.invoke( + "Test prompt", + unknown="unknown option", # should be ignored + seed=0, # should override the default + ) + assert out == "the quick brown fox" + + +def test_call_raises_exception_on_missing_server(monkeypatch: MonkeyPatch) -> None: + """ + Test that the LLM raises a ConnectionError when no llamafile server is + listening at the base_url. + """ + llm = Llamafile( + # invalid url, nothing should actually be running here + base_url="http://llamafile-host:8080", + ) + with pytest.raises(requests.exceptions.ConnectionError): + llm.invoke("Test prompt") + + +def test_streaming(monkeypatch: MonkeyPatch) -> None: + """ + Test basic functionality of `invoke` with streaming enabled. 
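+
+    The mocked response yields two server-sent-event chunks ("the" and
+    " quick"), so the streamed output should be their concatenation.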
+ """ + llm = Llamafile( + base_url="http://llamafile-hostname:8080", + streaming=True, + ) + + def mock_post(url, headers, json, stream, timeout): # type: ignore[no-untyped-def] + assert url == "http://llamafile-hostname:8080/completion" + assert headers == { + "Content-Type": "application/json", + } + # 'unknown' kwarg should be ignored + assert "unknown" not in json + expected = {"prompt": "Test prompt", **default_generation_params()} + expected["stream"] = True + assert json == expected + assert stream is True + assert timeout is None + + return mock_response_stream() + + monkeypatch.setattr(requests, "post", mock_post) + out = llm.invoke("Test prompt") + assert out == "the quick" diff --git a/libs/community/tests/unit_tests/storage/test_imports.py b/libs/community/tests/unit_tests/storage/test_imports.py index 27bcec56d4b26..21f79d464e756 100644 --- a/libs/community/tests/unit_tests/storage/test_imports.py +++ b/libs/community/tests/unit_tests/storage/test_imports.py @@ -3,6 +3,7 @@ EXPECTED_ALL = [ "AstraDBStore", "AstraDBByteStore", + "MongoDBStore", "RedisStore", "UpstashRedisByteStore", "UpstashRedisStore", diff --git a/libs/community/tests/unit_tests/storage/test_mongodb.py b/libs/community/tests/unit_tests/storage/test_mongodb.py new file mode 100644 index 0000000000000..a1dc9ea906868 --- /dev/null +++ b/libs/community/tests/unit_tests/storage/test_mongodb.py @@ -0,0 +1,11 @@ +"""Light weight unit test that attempts to import MongodbStore. + +The actual code is tested in integration tests. + +This test is intended to catch errors in the import process. +""" + + +def test_import_storage() -> None: + """Attempt to import storage modules.""" + from langchain_community.storage.mongodb import MongoDBStore # noqa diff --git a/libs/community/tests/unit_tests/test_sql_database.py b/libs/community/tests/unit_tests/test_sql_database.py index 42aaef7134f53..4293265a1defd 100644 --- a/libs/community/tests/unit_tests/test_sql_database.py +++ b/libs/community/tests/unit_tests/test_sql_database.py @@ -10,7 +10,6 @@ String, Table, Text, - create_engine, insert, select, ) @@ -35,11 +34,19 @@ ) -def test_table_info() -> None: - """Test that table info is constructed properly.""" - engine = create_engine("sqlite:///:memory:") +@pytest.fixture +def engine() -> sa.Engine: + return sa.create_engine("sqlite:///:memory:") + + +@pytest.fixture +def db(engine: sa.Engine) -> SQLDatabase: metadata_obj.create_all(engine) - db = SQLDatabase(engine) + return SQLDatabase(engine) + + +def test_table_info(db: SQLDatabase) -> None: + """Test that table info is constructed properly.""" output = db.table_info expected_output = """ CREATE TABLE user ( @@ -68,20 +75,19 @@ def test_table_info() -> None: assert sorted(" ".join(output.split())) == sorted(" ".join(expected_output.split())) -def test_table_info_w_sample_rows() -> None: +def test_table_info_w_sample_rows(db: SQLDatabase) -> None: """Test that table info is constructed properly.""" - engine = create_engine("sqlite:///:memory:") - metadata_obj.create_all(engine) + + # Provision. values = [ {"user_id": 13, "user_name": "Harrison", "user_bio": "bio"}, {"user_id": 14, "user_name": "Chase", "user_bio": "bio"}, ] stmt = insert(user).values(values) - with engine.begin() as conn: - conn.execute(stmt) - - db = SQLDatabase(engine, sample_rows_in_table_info=2) + db._execute(stmt) + # Query and verify. 
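+    # A second SQLDatabase is built on the fixture's engine so that
+    # sample_rows_in_table_info=2 applies to this test only.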
+ db = SQLDatabase(db._engine, sample_rows_in_table_info=2) output = db.table_info expected_output = """ @@ -112,16 +118,16 @@ def test_table_info_w_sample_rows() -> None: assert sorted(output.split()) == sorted(expected_output.split()) -def test_sql_database_run_fetch_all() -> None: +def test_sql_database_run_fetch_all(db: SQLDatabase) -> None: """Verify running SQL expressions returning results as strings.""" - engine = create_engine("sqlite:///:memory:") - metadata_obj.create_all(engine) + + # Provision. stmt = insert(user).values( user_id=13, user_name="Harrison", user_bio="That is my Bio " * 24 ) - with engine.begin() as conn: - conn.execute(stmt) - db = SQLDatabase(engine) + db._execute(stmt) + + # Query and verify. command = "select user_id, user_name, user_bio from user where user_id = 13" partial_output = db.run(command) user_bio = "That is my Bio " * 19 + "That is my..." @@ -135,60 +141,57 @@ def test_sql_database_run_fetch_all() -> None: assert full_output == expected_full_output -def test_sql_database_run_fetch_result() -> None: +def test_sql_database_run_fetch_result(db: SQLDatabase) -> None: """Verify running SQL expressions returning results as SQLAlchemy `Result` instances.""" - engine = create_engine("sqlite:///:memory:") - metadata_obj.create_all(engine) + + # Provision. stmt = insert(user).values(user_id=17, user_name="hwchase") - with engine.begin() as conn: - conn.execute(stmt) - db = SQLDatabase(engine) - command = "select user_id, user_name, user_bio from user where user_id = 17" + db._execute(stmt) + # Query and verify. + command = "select user_id, user_name, user_bio from user where user_id = 17" result = db.run(command, fetch="cursor", include_columns=True) expected = [{"user_id": 17, "user_name": "hwchase", "user_bio": None}] assert isinstance(result, Result) assert result.mappings().fetchall() == expected -def test_sql_database_run_with_parameters() -> None: +def test_sql_database_run_with_parameters(db: SQLDatabase) -> None: """Verify running SQL expressions with query parameters.""" - engine = create_engine("sqlite:///:memory:") - metadata_obj.create_all(engine) + + # Provision. stmt = insert(user).values(user_id=17, user_name="hwchase") - with engine.begin() as conn: - conn.execute(stmt) - db = SQLDatabase(engine) - command = "select user_id, user_name, user_bio from user where user_id = :user_id" + db._execute(stmt) + # Query and verify. + command = "select user_id, user_name, user_bio from user where user_id = :user_id" full_output = db.run(command, parameters={"user_id": 17}, include_columns=True) expected_full_output = "[{'user_id': 17, 'user_name': 'hwchase', 'user_bio': None}]" assert full_output == expected_full_output -def test_sql_database_run_sqlalchemy_selectable() -> None: +def test_sql_database_run_sqlalchemy_selectable(db: SQLDatabase) -> None: """Verify running SQL expressions using SQLAlchemy selectable.""" - engine = create_engine("sqlite:///:memory:") - metadata_obj.create_all(engine) + + # Provision. stmt = insert(user).values(user_id=17, user_name="hwchase") - with engine.begin() as conn: - conn.execute(stmt) - db = SQLDatabase(engine) - command = select(user).where(user.c.user_id == 17) + db._execute(stmt) + # Query and verify. 
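+    # db.run() accepts a SQLAlchemy selectable as well as a raw SQL string.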
+ command = select(user).where(user.c.user_id == 17) full_output = db.run(command, include_columns=True) expected_full_output = "[{'user_id': 17, 'user_name': 'hwchase', 'user_bio': None}]" assert full_output == expected_full_output -def test_sql_database_run_update() -> None: +def test_sql_database_run_update(db: SQLDatabase) -> None: """Test commands which return no rows return an empty string.""" - engine = create_engine("sqlite:///:memory:") - metadata_obj.create_all(engine) + + # Provision. stmt = insert(user).values(user_id=13, user_name="Harrison") - with engine.begin() as conn: - conn.execute(stmt) - db = SQLDatabase(engine) + db._execute(stmt) + + # Query and verify. command = "update user set user_name='Updated' where user_id = 13" output = db.run(command) expected_output = "" @@ -198,7 +201,7 @@ def test_sql_database_run_update() -> None: def test_sql_database_schema_translate_map() -> None: """Verify using statement-specific execution options.""" - engine = create_engine("sqlite:///:memory:") + engine = sa.create_engine("sqlite:///:memory:") db = SQLDatabase(engine) # Define query using SQLAlchemy selectable. diff --git a/libs/community/tests/unit_tests/tools/test_imports.py b/libs/community/tests/unit_tests/tools/test_imports.py index 4bf70aa0842f9..95fd4315575a5 100644 --- a/libs/community/tests/unit_tests/tools/test_imports.py +++ b/libs/community/tests/unit_tests/tools/test_imports.py @@ -24,6 +24,10 @@ "BingSearchRun", "BraveSearch", "ClickTool", + "CogniswitchKnowledgeSourceFile", + "CogniswitchKnowledgeSourceURL", + "CogniswitchKnowledgeRequest", + "CogniswitchKnowledgeStatus", "ConneryAction", "CopyFileTool", "CurrentWebPageTool", diff --git a/libs/community/tests/unit_tests/tools/test_public_api.py b/libs/community/tests/unit_tests/tools/test_public_api.py index 31ea8327022e1..1595dd4710917 100644 --- a/libs/community/tests/unit_tests/tools/test_public_api.py +++ b/libs/community/tests/unit_tests/tools/test_public_api.py @@ -25,6 +25,10 @@ "BingSearchRun", "BraveSearch", "ClickTool", + "CogniswitchKnowledgeSourceFile", + "CogniswitchKnowledgeStatus", + "CogniswitchKnowledgeSourceURL", + "CogniswitchKnowledgeRequest", "ConneryAction", "CopyFileTool", "CurrentWebPageTool", diff --git a/libs/community/tests/unit_tests/vectorstores/test_databricks_vector_search.py b/libs/community/tests/unit_tests/vectorstores/test_databricks_vector_search.py index 1a03890f637d9..4bdcee9acfb91 100644 --- a/libs/community/tests/unit_tests/vectorstores/test_databricks_vector_search.py +++ b/libs/community/tests/unit_tests/vectorstores/test_databricks_vector_search.py @@ -1,7 +1,8 @@ +import itertools import random import uuid -from typing import List -from unittest.mock import MagicMock +from typing import Dict, List, Optional, Set +from unittest.mock import MagicMock, patch import pytest @@ -120,6 +121,52 @@ def embed_query(self, text: str) -> List[float]: "next_page_token": "", } +EXAMPLE_SEARCH_RESPONSE_FIXED_SCORE: Dict = { + "manifest": { + "column_count": 3, + "columns": [ + {"name": DEFAULT_PRIMARY_KEY}, + {"name": DEFAULT_TEXT_COLUMN}, + {"name": "score"}, + ], + }, + "result": { + "row_count": len(fake_texts), + "data_array": sorted( + [[str(uuid.uuid4()), s, 0.5] for s in fake_texts], + key=lambda x: x[2], # type: ignore + reverse=True, + ), + }, + "next_page_token": "", +} + +EXAMPLE_SEARCH_RESPONSE_WITH_EMBEDDING = { + "manifest": { + "column_count": 3, + "columns": [ + {"name": DEFAULT_PRIMARY_KEY}, + {"name": DEFAULT_TEXT_COLUMN}, + {"name": DEFAULT_VECTOR_COLUMN}, + {"name": 
"score"}, + ], + }, + "result": { + "row_count": len(fake_texts), + "data_array": sorted( + [ + [str(uuid.uuid4()), s, e, random.uniform(0, 1)] + for s, e in zip( + fake_texts, DEFAULT_EMBEDDING_MODEL.embed_documents(fake_texts) + ) + ], + key=lambda x: x[2], # type: ignore + reverse=True, + ), + }, + "next_page_token": "", +} + def mock_index(index_details: dict) -> MagicMock: from databricks.vector_search.client import VectorSearchIndex @@ -129,11 +176,14 @@ def mock_index(index_details: dict) -> MagicMock: return index -def default_databricks_vector_search(index: MagicMock) -> DatabricksVectorSearch: +def default_databricks_vector_search( + index: MagicMock, columns: Optional[List[str]] = None +) -> DatabricksVectorSearch: return DatabricksVectorSearch( index, embedding=DEFAULT_EMBEDDING_MODEL, text_column=DEFAULT_TEXT_COLUMN, + columns=columns, ) @@ -456,6 +506,100 @@ def test_similarity_search(index_details: dict) -> None: assert all([DEFAULT_PRIMARY_KEY in d.metadata for d in search_result]) +@pytest.mark.requires("databricks", "databricks.vector_search") +@pytest.mark.parametrize( + "index_details, columns, expected_columns", + [ + (DELTA_SYNC_INDEX_SELF_MANAGED_EMBEDDINGS, None, {"id"}), + (DIRECT_ACCESS_INDEX, None, {"id"}), + ( + DELTA_SYNC_INDEX_SELF_MANAGED_EMBEDDINGS, + [DEFAULT_PRIMARY_KEY, DEFAULT_TEXT_COLUMN, DEFAULT_VECTOR_COLUMN], + {"text_vector", "id"}, + ), + ( + DIRECT_ACCESS_INDEX, + [DEFAULT_PRIMARY_KEY, DEFAULT_TEXT_COLUMN, DEFAULT_VECTOR_COLUMN], + {"text_vector", "id"}, + ), + ], +) +def test_mmr_search( + index_details: dict, columns: Optional[List[str]], expected_columns: Set[str] +) -> None: + index = mock_index(index_details) + index.similarity_search.return_value = EXAMPLE_SEARCH_RESPONSE_WITH_EMBEDDING + vectorsearch = default_databricks_vector_search(index, columns) + query = fake_texts[0] + filters = {"some filter": True} + limit = 1 + + search_result = vectorsearch.max_marginal_relevance_search( + query, k=limit, filters=filters + ) + assert [doc.page_content for doc in search_result] == [fake_texts[0]] + assert [set(doc.metadata.keys()) for doc in search_result] == [expected_columns] + + +@pytest.mark.requires("databricks", "databricks.vector_search") +@pytest.mark.parametrize( + "index_details", [DELTA_SYNC_INDEX_SELF_MANAGED_EMBEDDINGS, DIRECT_ACCESS_INDEX] +) +def test_mmr_parameters(index_details: dict) -> None: + index = mock_index(index_details) + index.similarity_search.return_value = EXAMPLE_SEARCH_RESPONSE_WITH_EMBEDDING + query = fake_texts[0] + limit = 1 + fetch_k = 3 + lambda_mult = 0.25 + filters = {"some filter": True} + + with patch( + "langchain_community.vectorstores.databricks_vector_search.maximal_marginal_relevance" + ) as mock_mmr: + mock_mmr.return_value = [2] + retriever = default_databricks_vector_search(index).as_retriever( + search_type="mmr", + search_kwargs={ + "k": limit, + "fetch_k": fetch_k, + "lambda_mult": lambda_mult, + "filters": filters, + }, + ) + search_result = retriever.get_relevant_documents(query) + + mock_mmr.assert_called_once() + assert mock_mmr.call_args[1]["lambda_mult"] == lambda_mult + assert index.similarity_search.call_args[1]["num_results"] == fetch_k + assert index.similarity_search.call_args[1]["filters"] == filters + assert len(search_result) == limit + + +@pytest.mark.requires("databricks", "databricks.vector_search") +@pytest.mark.parametrize( + "index_details, threshold", itertools.product(ALL_INDEXES, [0.4, 0.5, 0.8]) +) +def test_similarity_score_threshold(index_details: dict, threshold: float) 
-> None: + index = mock_index(index_details) + index.similarity_search.return_value = EXAMPLE_SEARCH_RESPONSE_FIXED_SCORE + uniform_response_score = EXAMPLE_SEARCH_RESPONSE_FIXED_SCORE["result"][ + "data_array" + ][0][2] + query = fake_texts[0] + limit = len(fake_texts) + + retriever = default_databricks_vector_search(index).as_retriever( + search_type="similarity_score_threshold", + search_kwargs={"k": limit, "score_threshold": threshold}, + ) + search_result = retriever.get_relevant_documents(query) + if uniform_response_score >= threshold: + assert len(search_result) == len(fake_texts) + else: + assert len(search_result) == 0 + + @pytest.mark.requires("databricks", "databricks.vector_search") @pytest.mark.parametrize( "index_details", [DELTA_SYNC_INDEX_SELF_MANAGED_EMBEDDINGS, DIRECT_ACCESS_INDEX] diff --git a/libs/community/tests/unit_tests/vectorstores/test_faiss.py b/libs/community/tests/unit_tests/vectorstores/test_faiss.py index 375fa00eb6d0b..cedecc8ada67e 100644 --- a/libs/community/tests/unit_tests/vectorstores/test_faiss.py +++ b/libs/community/tests/unit_tests/vectorstores/test_faiss.py @@ -438,8 +438,9 @@ def test_faiss_with_metadatas_and_filter() -> None: ) assert docsearch.docstore.__dict__ == expected_docstore.__dict__ output = docsearch.similarity_search("foo", k=1, filter={"page": 1}) - assert output == [Document(page_content="foo", metadata={"page": 0})] - assert output != [Document(page_content="bar", metadata={"page": 1})] + # make sure it returns the result that matches the filter. + # Not the one who's text matches better. + assert output == [Document(page_content="bar", metadata={"page": 1})] assert output == docsearch.similarity_search( "foo", k=1, filter=lambda di: di["page"] == 1 ) @@ -465,8 +466,9 @@ async def test_faiss_async_with_metadatas_and_filter() -> None: ) assert docsearch.docstore.__dict__ == expected_docstore.__dict__ output = await docsearch.asimilarity_search("foo", k=1, filter={"page": 1}) - assert output == [Document(page_content="foo", metadata={"page": 0})] - assert output != [Document(page_content="bar", metadata={"page": 1})] + # make sure it returns the result that matches the filter. + # Not the one who's text matches better. 
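+    # The async path should mirror the synchronous filter test above.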
+ assert output == [Document(page_content="bar", metadata={"page": 1})] assert output == await docsearch.asimilarity_search( "foo", k=1, filter=lambda di: di["page"] == 1 ) diff --git a/libs/community/tests/unit_tests/vectorstores/test_indexing_docs.py b/libs/community/tests/unit_tests/vectorstores/test_indexing_docs.py index 1232d6bb9a681..85c5312d1f924 100644 --- a/libs/community/tests/unit_tests/vectorstores/test_indexing_docs.py +++ b/libs/community/tests/unit_tests/vectorstores/test_indexing_docs.py @@ -68,6 +68,7 @@ def check_compatibility(vector_store: VectorStore) -> bool: "Pinecone", "Qdrant", "Redis", + "Rockset", "ScaNN", "SemaDB", "SupabaseVectorStore", diff --git a/libs/community/tests/unit_tests/vectorstores/test_public_api.py b/libs/community/tests/unit_tests/vectorstores/test_public_api.py index 53713f3e1bebf..48b51accdda87 100644 --- a/libs/community/tests/unit_tests/vectorstores/test_public_api.py +++ b/libs/community/tests/unit_tests/vectorstores/test_public_api.py @@ -6,6 +6,7 @@ "AlibabaCloudOpenSearchSettings", "AnalyticDB", "Annoy", + "ApacheDoris", "AtlasDB", "AwaDB", "AzureSearch", diff --git a/libs/core/langchain_core/_api/beta_decorator.py b/libs/core/langchain_core/_api/beta_decorator.py index 7326dbb5ef4a0..19f5db11df5e5 100644 --- a/libs/core/langchain_core/_api/beta_decorator.py +++ b/libs/core/langchain_core/_api/beta_decorator.py @@ -206,10 +206,9 @@ def finalize( # type: ignore old_doc = inspect.cleandoc(old_doc or "").strip("\n") + # old_doc can be None if not old_doc: - new_doc = "[*Beta*]" - else: - new_doc = f"[*Beta*] {old_doc}" + old_doc = "" # Modify the docstring to include a beta notice. notes_header = "\nNotes\n-----" @@ -218,7 +217,7 @@ def finalize( # type: ignore addendum, ] details = " ".join([component.strip() for component in components if component]) - new_doc += ( + new_doc = ( f"[*Beta*] {old_doc}\n" f"{notes_header if notes_header not in old_doc else ''}\n" f".. beta::\n" diff --git a/libs/core/langchain_core/_api/deprecation.py b/libs/core/langchain_core/_api/deprecation.py index b4d56b70cb494..970cfa569463b 100644 --- a/libs/core/langchain_core/_api/deprecation.py +++ b/libs/core/langchain_core/_api/deprecation.py @@ -245,10 +245,9 @@ def finalize( # type: ignore old_doc = inspect.cleandoc(old_doc or "").strip("\n") + # old_doc can be None if not old_doc: - new_doc = "[*Deprecated*]" - else: - new_doc = f"[*Deprecated*] {old_doc}" + old_doc = "" # Modify the docstring to include a deprecation notice. notes_header = "\nNotes\n-----" @@ -258,7 +257,7 @@ def finalize( # type: ignore addendum, ] details = " ".join([component.strip() for component in components if component]) - new_doc += ( + new_doc = ( f"[*Deprecated*] {old_doc}\n" f"{notes_header if notes_header not in old_doc else ''}\n" f".. 
deprecated:: {since}\n" diff --git a/libs/core/langchain_core/language_models/llms.py b/libs/core/langchain_core/language_models/llms.py index ec2af1b91e088..36d9435325146 100644 --- a/libs/core/langchain_core/language_models/llms.py +++ b/libs/core/langchain_core/language_models/llms.py @@ -1120,7 +1120,7 @@ def save(self, file_path: Union[Path, str]) -> None: if save_path.suffix == ".json": with open(file_path, "w") as f: json.dump(prompt_dict, f, indent=4) - elif save_path.suffix == ".yaml": + elif save_path.suffix.endswith((".yaml", ".yml")): with open(file_path, "w") as f: yaml.dump(prompt_dict, f, default_flow_style=False) else: diff --git a/libs/core/langchain_core/messages/base.py b/libs/core/langchain_core/messages/base.py index 42bd5e0869c3e..12229244cc473 100644 --- a/libs/core/langchain_core/messages/base.py +++ b/libs/core/langchain_core/messages/base.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, List, Sequence, Union +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Union from langchain_core.load.serializable import Serializable from langchain_core.pydantic_v1 import Extra, Field @@ -25,6 +25,8 @@ class BaseMessage(Serializable): type: str + name: Optional[str] = None + class Config: extra = Extra.allow @@ -53,6 +55,8 @@ def __add__(self, other: Any) -> ChatPromptTemplate: def pretty_repr(self, html: bool = False) -> str: title = get_msg_title_repr(self.type.title() + " Message", bold=html) # TODO: handle non-string content. + if self.name is not None: + title += f"\nName: {self.name}" return f"{title}\n\n{self.content}" def pretty_print(self) -> None: diff --git a/libs/core/langchain_core/output_parsers/json.py b/libs/core/langchain_core/output_parsers/json.py index d16316596a7c1..c107cd0c765a3 100644 --- a/libs/core/langchain_core/output_parsers/json.py +++ b/libs/core/langchain_core/output_parsers/json.py @@ -221,7 +221,8 @@ def get_format_instructions(self) -> str: if self.pydantic_object is None: return "Return a JSON object." else: - schema = self.pydantic_object.schema() + # Copy schema to avoid altering original Pydantic schema. + schema = {k: v for k, v in self.pydantic_object.schema().items()} # Remove extraneous fields. 
reduced_schema = schema diff --git a/libs/core/langchain_core/prompts/base.py b/libs/core/langchain_core/prompts/base.py index 51eddf0f7718c..96cfbf63740e1 100644 --- a/libs/core/langchain_core/prompts/base.py +++ b/libs/core/langchain_core/prompts/base.py @@ -221,7 +221,7 @@ def save(self, file_path: Union[Path, str]) -> None: if save_path.suffix == ".json": with open(file_path, "w") as f: json.dump(prompt_dict, f, indent=4) - elif save_path.suffix == ".yaml": + elif save_path.suffix.endswith((".yaml", ".yml")): with open(file_path, "w") as f: yaml.dump(prompt_dict, f, default_flow_style=False) else: diff --git a/libs/core/langchain_core/prompts/loading.py b/libs/core/langchain_core/prompts/loading.py index dd6e0c35478d3..baa56a5666d72 100644 --- a/libs/core/langchain_core/prompts/loading.py +++ b/libs/core/langchain_core/prompts/loading.py @@ -146,7 +146,7 @@ def _load_prompt_from_file(file: Union[str, Path]) -> BasePromptTemplate: if file_path.suffix == ".json": with open(file_path) as f: config = json.load(f) - elif file_path.suffix == ".yaml": + elif file_path.suffix.endswith((".yaml", ".yml")): with open(file_path, "r") as f: config = yaml.safe_load(f) else: diff --git a/libs/core/langchain_core/tracers/log_stream.py b/libs/core/langchain_core/tracers/log_stream.py index b3cadb55a6af7..2d92964b1939c 100644 --- a/libs/core/langchain_core/tracers/log_stream.py +++ b/libs/core/langchain_core/tracers/log_stream.py @@ -2,7 +2,6 @@ import asyncio import copy -import math import threading from collections import defaultdict from typing import ( @@ -20,7 +19,6 @@ from uuid import UUID import jsonpatch # type: ignore[import] -from anyio import BrokenResourceError, ClosedResourceError, create_memory_object_stream from typing_extensions import NotRequired, TypedDict from langchain_core.load import dumps @@ -29,6 +27,7 @@ from langchain_core.runnables import Runnable, RunnableConfig, ensure_config from langchain_core.runnables.utils import Input, Output from langchain_core.tracers.base import BaseTracer +from langchain_core.tracers.memory_stream import _MemoryStream from langchain_core.tracers.schemas import Run @@ -210,12 +209,11 @@ def __init__( self.exclude_types = exclude_types self.exclude_tags = exclude_tags - send_stream: Any - receive_stream: Any - send_stream, receive_stream = create_memory_object_stream(math.inf) + loop = asyncio.get_event_loop() + memory_stream = _MemoryStream[RunLogPatch](loop) self.lock = threading.Lock() - self.send_stream = send_stream - self.receive_stream = receive_stream + self.send_stream = memory_stream.get_send_stream() + self.receive_stream = memory_stream.get_receive_stream() self._key_map_by_run_id: Dict[UUID, str] = {} self._counter_map_by_name: Dict[str, int] = defaultdict(int) self.root_id: Optional[UUID] = None @@ -225,11 +223,12 @@ def __aiter__(self) -> AsyncIterator[RunLogPatch]: def send(self, *ops: Dict[str, Any]) -> bool: """Send a patch to the stream, return False if the stream is closed.""" - try: - self.send_stream.send_nowait(RunLogPatch(*ops)) - return True - except (ClosedResourceError, BrokenResourceError): - return False + # We will likely want to wrap this in try / except at some point + # to handle exceptions that might arise at run time. + # For now we'll let the exception bubble up, and always return + # True on the happy path. 
+ self.send_stream.send_nowait(RunLogPatch(*ops)) + return True async def tap_output_aiter( self, run_id: UUID, output: AsyncIterator[T] diff --git a/libs/core/langchain_core/tracers/memory_stream.py b/libs/core/langchain_core/tracers/memory_stream.py new file mode 100644 index 0000000000000..871b5ca932e93 --- /dev/null +++ b/libs/core/langchain_core/tracers/memory_stream.py @@ -0,0 +1,104 @@ +"""Module implements a memory stream for communication between two co-routines. + +This module provides a way to communicate between two co-routines using a memory +channel. The writer and reader can be in the same event loop or in different event +loops. When they're in different event loops, they will also be in different +threads. + +This is useful in situations when there's a mix of synchronous and asynchronous +used in the code. +""" +import asyncio +from asyncio import AbstractEventLoop, Queue +from typing import AsyncIterator, Generic, TypeVar + +T = TypeVar("T") + + +class _SendStream(Generic[T]): + def __init__( + self, reader_loop: AbstractEventLoop, queue: Queue, done: object + ) -> None: + """Create a writer for the queue and done object. + + Args: + reader_loop: The event loop to use for the writer. This loop will be used + to schedule the writes to the queue. + queue: The queue to write to. This is an asyncio queue. + done: Special sentinel object to indicate that the writer is done. + """ + self._reader_loop = reader_loop + self._queue = queue + self._done = done + + async def send(self, item: T) -> None: + """Schedule the item to be written to the queue using the original loop.""" + return self.send_nowait(item) + + def send_nowait(self, item: T) -> None: + """Schedule the item to be written to the queue using the original loop.""" + self._reader_loop.call_soon_threadsafe(self._queue.put_nowait, item) + + async def aclose(self) -> None: + """Schedule the done object write the queue using the original loop.""" + return self.close() + + def close(self) -> None: + """Schedule the done object write the queue using the original loop.""" + self._reader_loop.call_soon_threadsafe(self._queue.put_nowait, self._done) + + +class _ReceiveStream(Generic[T]): + def __init__(self, queue: Queue, done: object) -> None: + """Create a reader for the queue and done object. + + This reader should be used in the same loop as the loop that was passed + to the channel. + """ + self._queue = queue + self._done = done + self._is_closed = False + + async def __aiter__(self) -> AsyncIterator[T]: + while True: + item = await self._queue.get() + if item is self._done: + self._is_closed = True + break + yield item + + +class _MemoryStream(Generic[T]): + """Stream data from a writer to a reader even if they are in different threads. + + Uses asyncio queues to communicate between two co-routines. This implementation + should work even if the writer and reader co-routines belong to two different + event loops (e.g. one running from an event loop in the main thread + and the other running in an event loop in a background thread). + + This implementation is meant to be used with a single writer and a single reader. + + This is an internal implementation to LangChain please do not use it directly. + """ + + def __init__(self, loop: AbstractEventLoop) -> None: + """Create a channel for the given loop. + + Args: + loop: The event loop to use for the channel. The reader is assumed + to be running in the same loop as the one passed to this constructor. + This will NOT be validated at run time. 
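+
+        Example (illustrative usage; assumes the reader iterates in this loop)::
+
+            loop = asyncio.get_event_loop()
+            channel = _MemoryStream[str](loop)
+            writer = channel.get_send_stream()
+            reader = channel.get_receive_stream()
+            writer.send_nowait("hello")
+            writer.close()
+            # async for item in reader: ...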
+ """ + self._loop = loop + self._queue: asyncio.Queue = asyncio.Queue(maxsize=0) + self._done = object() + + def get_send_stream(self) -> _SendStream[T]: + """Get a writer for the channel.""" + return _SendStream[T]( + reader_loop=self._loop, queue=self._queue, done=self._done + ) + + def get_receive_stream(self) -> _ReceiveStream[T]: + """Get a reader for the channel.""" + return _ReceiveStream[T](queue=self._queue, done=self._done) diff --git a/libs/core/langchain_core/utils/_merge.py b/libs/core/langchain_core/utils/_merge.py index e21fdd96621fe..13b91270c70d8 100644 --- a/libs/core/langchain_core/utils/_merge.py +++ b/libs/core/langchain_core/utils/_merge.py @@ -19,11 +19,9 @@ def merge_dicts(left: Dict[str, Any], right: Dict[str, Any]) -> Dict[str, Any]: for k, v in right.items(): if k not in merged: merged[k] = v - elif merged[k] is None and v: + elif v is not None and merged[k] is None: merged[k] = v - elif v is None: - continue - elif merged[k] == v: + elif v is None or merged[k] == v: continue elif type(merged[k]) != type(v): raise TypeError( diff --git a/libs/core/poetry.lock b/libs/core/poetry.lock index b8b0e512b26ea..ed548c2f62b6a 100644 --- a/libs/core/poetry.lock +++ b/libs/core/poetry.lock @@ -810,7 +810,6 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, ] [[package]] @@ -1124,13 +1123,13 @@ files = [ [[package]] name = "langsmith" -version = "0.0.87" +version = "0.1.1" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, - {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, + {file = "langsmith-0.1.1-py3-none-any.whl", hash = "sha256:10ff2b977a41e3f6351d1a4239d9bd57af0547aa909e839d2791e16cc197a6f9"}, + {file = "langsmith-0.1.1.tar.gz", hash = "sha256:09df0c2ca9085105f97a4e4f281b083e312c99d162f3fe2b2d5eefd5c3692e60"}, ] [package.dependencies] @@ -1164,16 +1163,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1953,7 +1942,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1961,16 +1949,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1987,7 +1967,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1995,7 +1974,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -2766,4 +2744,4 @@ extended-testing = ["jinja2"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "fec5bca68972fa7f7eea00a3cf3e158424bafdf028573611c575c3859ab289c5" +content-hash = "8a8ec249b5b3843aaec58c5da0041f0c3846b2671ac64be80b981cef2840dc11" diff --git a/libs/core/pyproject.toml b/libs/core/pyproject.toml index fc082a98062e1..b87a95dd7dd64 100644 --- a/libs/core/pyproject.toml +++ b/libs/core/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain-core" -version = "0.1.22" +version = "0.1.24" description = "Building applications with LLMs through composability" authors = [] license = "MIT" @@ -11,7 +11,7 @@ repository = "https://github.com/langchain-ai/langchain" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" pydantic = ">=1,<3" -langsmith = "^0.0.87" +langsmith = "^0.1.0" tenacity = "^8.1.0" jsonpatch = "^1.33" anyio = ">=3,<5" diff --git a/libs/core/tests/unit_tests/_api/test_beta_decorator.py b/libs/core/tests/unit_tests/_api/test_beta_decorator.py index 499e63745f995..caef30c251c34 100644 --- a/libs/core/tests/unit_tests/_api/test_beta_decorator.py +++ b/libs/core/tests/unit_tests/_api/test_beta_decorator.py @@ -112,7 +112,7 @@ def test_beta_function() -> None: doc = beta_function.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Beta*] original doc") + assert doc.startswith("[*Beta*] original doc") assert not inspect.iscoroutinefunction(beta_function) @@ -132,7 +132,7 @@ async def test_beta_async_function() -> None: doc = beta_function.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Beta*] original doc") + assert doc.startswith("[*Beta*] original doc") assert inspect.iscoroutinefunction(beta_async_function) @@ -152,7 +152,7 @@ def test_beta_method() -> None: doc = obj.beta_method.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Beta*] original doc") + assert 
doc.startswith("[*Beta*] original doc") assert not inspect.iscoroutinefunction(obj.beta_method) @@ -173,7 +173,7 @@ async def test_beta_async_method() -> None: doc = obj.beta_method.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Beta*] original doc") + assert doc.startswith("[*Beta*] original doc") assert inspect.iscoroutinefunction(obj.beta_async_method) @@ -192,7 +192,7 @@ def test_beta_classmethod() -> None: doc = ClassWithBetaMethods.beta_classmethod.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Beta*] original doc") + assert doc.startswith("[*Beta*] original doc") def test_beta_staticmethod() -> None: @@ -211,7 +211,7 @@ def test_beta_staticmethod() -> None: ) doc = ClassWithBetaMethods.beta_staticmethod.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Beta*] original doc") + assert doc.startswith("[*Beta*] original doc") def test_beta_property() -> None: @@ -231,13 +231,12 @@ def test_beta_property() -> None: ) doc = ClassWithBetaMethods.beta_property.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Beta*] original doc") + assert doc.startswith("[*Beta*] original doc") -def test_whole_class_deprecation() -> None: - """Test whole class deprecation.""" +def test_whole_class_beta() -> None: + """Test whole class beta status.""" - # Test whole class deprecation @beta() class BetaClass: def __init__(self) -> None: @@ -269,6 +268,73 @@ def beta_method(self) -> str: ) +def test_whole_class_inherited_beta() -> None: + """Test whole class beta status for inherited class. + + The original version of beta decorator created duplicates with + '[*Beta*]'. + """ + + # Test whole class beta status + @beta() + class BetaClass: + @beta() + def beta_method(self) -> str: + """original doc""" + return "This is a beta method." + + @beta() + class InheritedBetaClass(BetaClass): + @beta() + def beta_method(self) -> str: + """original doc""" + return "This is a beta method 2." + + with warnings.catch_warnings(record=True) as warning_list: + warnings.simplefilter("always") + + obj = BetaClass() + assert obj.beta_method() == "This is a beta method." + + assert len(warning_list) == 2 + warning = warning_list[0].message + assert str(warning) == ( + "The class `BetaClass` is in beta. It is actively being worked on, so the " + "API may change." + ) + + warning = warning_list[1].message + assert str(warning) == ( + "The function `beta_method` is in beta. It is actively being worked on, so " + "the API may change." + ) + + with warnings.catch_warnings(record=True) as warning_list: + warnings.simplefilter("always") + + obj = InheritedBetaClass() + assert obj.beta_method() == "This is a beta method 2." + + assert len(warning_list) == 2 + warning = warning_list[0].message + assert str(warning) == ( + "The class `InheritedBetaClass` is in beta. " + "It is actively being worked on, so the " + "API may change." + ) + + warning = warning_list[1].message + assert str(warning) == ( + "The function `beta_method` is in beta. " + "It is actively being worked on, so " + "the API may change." 
+ ) + + # if [*Beta*] was inserted only once: + if obj.__doc__ is not None: + assert obj.__doc__.count("[*Beta*]") == 1 + + # Tests with pydantic models class MyModel(BaseModel): @beta() @@ -292,4 +358,4 @@ def test_beta_method_pydantic() -> None: doc = obj.beta_method.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Beta*] original doc") + assert doc.startswith("[*Beta*] original doc") diff --git a/libs/core/tests/unit_tests/_api/test_deprecation.py b/libs/core/tests/unit_tests/_api/test_deprecation.py index d26b18c3ad448..8573d64b37906 100644 --- a/libs/core/tests/unit_tests/_api/test_deprecation.py +++ b/libs/core/tests/unit_tests/_api/test_deprecation.py @@ -129,7 +129,7 @@ def test_deprecated_function() -> None: doc = deprecated_function.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Deprecated*] original doc") + assert doc.startswith("[*Deprecated*] original doc") assert not inspect.iscoroutinefunction(deprecated_function) @@ -151,7 +151,7 @@ async def test_deprecated_async_function() -> None: doc = deprecated_function.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Deprecated*] original doc") + assert doc.startswith("[*Deprecated*] original doc") assert inspect.iscoroutinefunction(deprecated_async_function) @@ -171,7 +171,7 @@ def test_deprecated_method() -> None: doc = obj.deprecated_method.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Deprecated*] original doc") + assert doc.startswith("[*Deprecated*] original doc") assert not inspect.iscoroutinefunction(obj.deprecated_method) @@ -194,7 +194,7 @@ async def test_deprecated_async_method() -> None: doc = obj.deprecated_method.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Deprecated*] original doc") + assert doc.startswith("[*Deprecated*] original doc") assert inspect.iscoroutinefunction(obj.deprecated_async_method) @@ -213,7 +213,7 @@ def test_deprecated_classmethod() -> None: doc = ClassWithDeprecatedMethods.deprecated_classmethod.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Deprecated*] original doc") + assert doc.startswith("[*Deprecated*] original doc") def test_deprecated_staticmethod() -> None: @@ -233,7 +233,7 @@ def test_deprecated_staticmethod() -> None: ) doc = ClassWithDeprecatedMethods.deprecated_staticmethod.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Deprecated*] original doc") + assert doc.startswith("[*Deprecated*] original doc") def test_deprecated_property() -> None: @@ -253,7 +253,7 @@ def test_deprecated_property() -> None: ) doc = ClassWithDeprecatedMethods.deprecated_property.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Deprecated*] original doc") + assert doc.startswith("[*Deprecated*] original doc") def test_whole_class_deprecation() -> None: @@ -289,6 +289,88 @@ def deprecated_method(self) -> str: "The function `deprecated_method` was deprecated in " "LangChain 2.0.0 and will be removed in 3.0.0" ) + # [*Deprecated*] should be inserted only once: + if obj.__doc__ is not None: + assert obj.__doc__.count("[*Deprecated*]") == 1 + + +def test_whole_class_inherited_deprecation() -> None: + """Test whole class deprecation for inherited class. + + The original version of deprecation decorator created duplicates with + '[*Deprecated*]'. 
+ """ + + # Test whole class deprecation + @deprecated(since="2.0.0", removal="3.0.0") + class DeprecatedClass: + def __init__(self) -> None: + """original doc""" + pass + + @deprecated(since="2.0.0", removal="3.0.0") + def deprecated_method(self) -> str: + """original doc""" + return "This is a deprecated method." + + @deprecated(since="2.2.0", removal="3.2.0") + class InheritedDeprecatedClass(DeprecatedClass): + """Inherited deprecated class.""" + + def __init__(self) -> None: + """original doc""" + pass + + @deprecated(since="2.2.0", removal="3.2.0") + def deprecated_method(self) -> str: + """original doc""" + return "This is a deprecated method." + + with warnings.catch_warnings(record=True) as warning_list: + warnings.simplefilter("always") + + obj = DeprecatedClass() + assert obj.deprecated_method() == "This is a deprecated method." + + assert len(warning_list) == 2 + warning = warning_list[0].message + assert str(warning) == ( + "The class `tests.unit_tests._api.test_deprecation.DeprecatedClass` was " + "deprecated in tests 2.0.0 and will be removed in 3.0.0" + ) + + warning = warning_list[1].message + assert str(warning) == ( + "The function `deprecated_method` was deprecated in " + "LangChain 2.0.0 and will be removed in 3.0.0" + ) + # if [*Deprecated*] was inserted only once: + if obj.__doc__ is not None: + assert obj.__doc__.count("[*Deprecated*]") == 1 + + with warnings.catch_warnings(record=True) as warning_list: + warnings.simplefilter("always") + + obj = InheritedDeprecatedClass() + assert obj.deprecated_method() == "This is a deprecated method." + + assert len(warning_list) == 2 + warning = warning_list[0].message + assert str(warning) == ( + "The class " + "`tests.unit_tests._api.test_deprecation.InheritedDeprecatedClass` " + "was deprecated in tests 2.2.0 and will be removed in 3.2.0" + ) + + warning = warning_list[1].message + assert str(warning) == ( + "The function `deprecated_method` was deprecated in " + "LangChain 2.2.0 and will be removed in 3.2.0" + ) + # if [*Deprecated*] was inserted only once: + if obj.__doc__ is not None: + assert obj.__doc__.count("[*Deprecated*]") == 1 + assert "[*Deprecated*] Inherited deprecated class." 
in obj.__doc__ # Tests with pydantic models @@ -314,4 +396,4 @@ def test_deprecated_method_pydantic() -> None: doc = obj.deprecated_method.__doc__ assert isinstance(doc, str) - assert doc.startswith("[*Deprecated*] original doc") + assert doc.startswith("[*Deprecated*] original doc") diff --git a/libs/core/tests/unit_tests/output_parsers/test_json.py b/libs/core/tests/unit_tests/output_parsers/test_json.py index 6d8cb4bee3995..8cedc767699af 100644 --- a/libs/core/tests/unit_tests/output_parsers/test_json.py +++ b/libs/core/tests/unit_tests/output_parsers/test_json.py @@ -8,6 +8,8 @@ parse_json_markdown, parse_partial_json, ) +from langchain_core.pydantic_v1 import BaseModel +from langchain_core.utils.function_calling import convert_to_openai_function GOOD_JSON = """```json { @@ -579,3 +581,17 @@ def input_iter(_: Any) -> Iterator[str]: {"country_name": "France", "population_size": 673915}, {"country_name": "France", "population_size": 67391582}, ] + + +def test_base_model_schema_consistency() -> None: + class Joke(BaseModel): + setup: str + punchline: str + + initial_joke_schema = {k: v for k, v in Joke.schema().items()} + SimpleJsonOutputParser(pydantic_object=Joke) + openai_func = convert_to_openai_function(Joke) + retrieved_joke_schema = {k: v for k, v in Joke.schema().items()} + + assert initial_joke_schema == retrieved_joke_schema + assert openai_func.get("name", None) is not None diff --git a/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr b/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr index a717b95eecae2..fcd3cddf615eb 100644 --- a/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr +++ b/libs/core/tests/unit_tests/runnables/__snapshots__/test_runnable.ambr @@ -1718,6 +1718,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'ai', 'enum': list([ @@ -1761,6 +1765,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'role': dict({ 'title': 'Role', 'type': 'string', @@ -1909,6 +1917,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'human', 'enum': list([ @@ -1977,6 +1989,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'system', 'enum': list([ @@ -2020,6 +2036,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'tool_call_id': dict({ 'title': 'Tool Call Id', 'type': 'string', @@ -2116,6 +2136,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'ai', 'enum': list([ @@ -2159,6 +2183,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'role': dict({ 'title': 'Role', 'type': 'string', @@ -2307,6 +2335,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'human', 'enum': list([ @@ -2375,6 +2407,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'system', 'enum': list([ @@ -2418,6 +2454,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'tool_call_id': dict({ 'title': 'Tool Call Id', 'type': 'string', @@ -2498,6 +2538,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': 
dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'ai', 'enum': list([ @@ -2541,6 +2585,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'role': dict({ 'title': 'Role', 'type': 'string', @@ -2642,6 +2690,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'human', 'enum': list([ @@ -2688,6 +2740,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'system', 'enum': list([ @@ -2731,6 +2787,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'tool_call_id': dict({ 'title': 'Tool Call Id', 'type': 'string', @@ -2799,6 +2859,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'ai', 'enum': list([ @@ -2842,6 +2906,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'role': dict({ 'title': 'Role', 'type': 'string', @@ -2990,6 +3058,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'human', 'enum': list([ @@ -3058,6 +3130,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'system', 'enum': list([ @@ -3101,6 +3177,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'tool_call_id': dict({ 'title': 'Tool Call Id', 'type': 'string', @@ -3169,6 +3249,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'ai', 'enum': list([ @@ -3212,6 +3296,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'role': dict({ 'title': 'Role', 'type': 'string', @@ -3360,6 +3448,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'human', 'enum': list([ @@ -3428,6 +3520,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'system', 'enum': list([ @@ -3471,6 +3567,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'tool_call_id': dict({ 'title': 'Tool Call Id', 'type': 'string', @@ -3531,6 +3631,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'ai', 'enum': list([ @@ -3574,6 +3678,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'role': dict({ 'title': 'Role', 'type': 'string', @@ -3722,6 +3830,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'human', 'enum': list([ @@ -3801,6 +3913,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'system', 'enum': list([ @@ -3844,6 +3960,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'tool_call_id': dict({ 'title': 'Tool Call Id', 'type': 'string', @@ -3931,6 +4051,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'ai', 'enum': list([ @@ -3974,6 +4098,10 
@@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'role': dict({ 'title': 'Role', 'type': 'string', @@ -4075,6 +4203,10 @@ 'title': 'Example', 'type': 'boolean', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'human', 'enum': list([ @@ -4121,6 +4253,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'type': dict({ 'default': 'system', 'enum': list([ @@ -4164,6 +4300,10 @@ ]), 'title': 'Content', }), + 'name': dict({ + 'title': 'Name', + 'type': 'string', + }), 'tool_call_id': dict({ 'title': 'Tool Call Id', 'type': 'string', diff --git a/libs/core/tests/unit_tests/runnables/test_runnable.py b/libs/core/tests/unit_tests/runnables/test_runnable.py index 9272744bce1c5..ac7f5a0ccef7c 100644 --- a/libs/core/tests/unit_tests/runnables/test_runnable.py +++ b/libs/core/tests/unit_tests/runnables/test_runnable.py @@ -116,9 +116,9 @@ def _copy_run(self, run: Run) -> Run: return run.copy( update={ "id": self._replace_uuid(run.id), - "parent_run_id": self.uuids_map[run.parent_run_id] - if run.parent_run_id - else None, + "parent_run_id": ( + self.uuids_map[run.parent_run_id] if run.parent_run_id else None + ), "child_runs": [self._copy_run(child) for child in run.child_runs], "execution_order": None, "child_execution_order": None, @@ -345,6 +345,7 @@ async def typed_async_lambda_impl(x: str) -> int: "enum": ["ai"], "type": "string", }, + "name": {"title": "Name", "type": "string"}, "example": { "title": "Example", "default": False, @@ -380,6 +381,7 @@ async def typed_async_lambda_impl(x: str) -> int: "enum": ["human"], "type": "string", }, + "name": {"title": "Name", "type": "string"}, "example": { "title": "Example", "default": False, @@ -390,7 +392,7 @@ async def typed_async_lambda_impl(x: str) -> int: }, "ChatMessage": { "title": "ChatMessage", - "description": "Message that can be assigned an arbitrary speaker (i.e. role).", # noqa + "description": "Message that can be assigned an arbitrary speaker (i.e. 
role).", # noqa: E501 "type": "object", "properties": { "content": { @@ -415,13 +417,14 @@ async def typed_async_lambda_impl(x: str) -> int: "enum": ["chat"], "type": "string", }, + "name": {"title": "Name", "type": "string"}, "role": {"title": "Role", "type": "string"}, }, "required": ["content", "role"], }, "SystemMessage": { "title": "SystemMessage", - "description": "Message for priming AI behavior, usually passed in as the first of a sequence\nof input messages.", # noqa + "description": "Message for priming AI behavior, usually passed in as the first of a sequence\nof input messages.", # noqa: E501 "type": "object", "properties": { "content": { @@ -446,12 +449,13 @@ async def typed_async_lambda_impl(x: str) -> int: "enum": ["system"], "type": "string", }, + "name": {"title": "Name", "type": "string"}, }, "required": ["content"], }, "FunctionMessage": { "title": "FunctionMessage", - "description": "Message for passing the result of executing a function back to a model.", # noqa + "description": "Message for passing the result of executing a function back to a model.", # noqa: E501 "type": "object", "properties": { "content": { @@ -482,7 +486,7 @@ async def typed_async_lambda_impl(x: str) -> int: }, "ToolMessage": { "title": "ToolMessage", - "description": "Message for passing the result of executing a tool back to a model.", # noqa + "description": "Message for passing the result of executing a tool back to a model.", # noqa: E501 "type": "object", "properties": { "content": { @@ -507,6 +511,7 @@ async def typed_async_lambda_impl(x: str) -> int: "enum": ["tool"], "type": "string", }, + "name": {"title": "Name", "type": "string"}, "tool_call_id": {"title": "Tool Call Id", "type": "string"}, }, "required": ["content", "tool_call_id"], @@ -658,14 +663,11 @@ def test_lambda_schemas() -> None: } second_lambda = lambda x, y: (x["hello"], x["bye"], y["bah"]) # noqa: E731 - assert ( - RunnableLambda(second_lambda).input_schema.schema() # type: ignore[arg-type] - == { - "title": "RunnableLambdaInput", - "type": "object", - "properties": {"hello": {"title": "Hello"}, "bye": {"title": "Bye"}}, - } - ) + assert RunnableLambda(second_lambda).input_schema.schema() == { # type: ignore[arg-type] + "title": "RunnableLambdaInput", + "type": "object", + "properties": {"hello": {"title": "Hello"}, "bye": {"title": "Bye"}}, + } def get_value(input): # type: ignore[no-untyped-def] return input["variable_name"] @@ -721,7 +723,9 @@ async def aget_values_typed(input: InputType) -> OutputType: } assert ( - RunnableLambda(aget_values_typed).input_schema.schema() # type: ignore[arg-type] + RunnableLambda( + aget_values_typed # type: ignore[arg-type] + ).input_schema.schema() == { "title": "aget_values_typed_input", "$ref": "#/definitions/InputType", diff --git a/libs/core/tests/unit_tests/test_messages.py b/libs/core/tests/unit_tests/test_messages.py index 6f8b97951ca52..7cd7c9a913b48 100644 --- a/libs/core/tests/unit_tests/test_messages.py +++ b/libs/core/tests/unit_tests/test_messages.py @@ -1,5 +1,5 @@ import unittest -from typing import List +from typing import List, Type import pytest @@ -192,6 +192,21 @@ def test_multiple_msg() -> None: assert messages_from_dict(messages_to_dict(msgs)) == msgs +def test_multiple_msg_with_name() -> None: + human_msg = HumanMessage( + content="human", additional_kwargs={"key": "value"}, name="human erick" + ) + ai_msg = AIMessage(content="ai", name="ai erick") + sys_msg = SystemMessage(content="sys", name="sys erick") + + msgs = [ + human_msg, + ai_msg, + sys_msg, + ] + 
assert messages_from_dict(messages_to_dict(msgs)) == msgs + + def test_message_chunk_to_message() -> None: assert message_chunk_to_message( AIMessageChunk(content="I am", additional_kwargs={"foo": "bar"}) @@ -480,3 +495,49 @@ def test_convert_to_messages() -> None: HumanMessage(content="Hello!"), AIMessage(content="Hi!"), ] + + +@pytest.mark.parametrize( + "MessageClass", + [ + AIMessage, + AIMessageChunk, + HumanMessage, + HumanMessageChunk, + SystemMessage, + ], +) +def test_message_name(MessageClass: Type) -> None: + msg = MessageClass(content="foo", name="bar") + assert msg.name == "bar" + + msg2 = MessageClass(content="foo", name=None) + assert msg2.name is None + + msg3 = MessageClass(content="foo") + assert msg3.name is None + + +@pytest.mark.parametrize( + "MessageClass", + [FunctionMessage, FunctionMessageChunk], +) +def test_message_name_function(MessageClass: Type) -> None: + # functionmessage doesn't support name=None + msg = MessageClass(name="foo", content="bar") + assert msg.name == "foo" + + +@pytest.mark.parametrize( + "MessageClass", + [ChatMessage, ChatMessageChunk], +) +def test_message_name_chat(MessageClass: Type) -> None: + msg = MessageClass(content="foo", role="user", name="bar") + assert msg.name == "bar" + + msg2 = MessageClass(content="foo", role="user", name=None) + assert msg2.name is None + + msg3 = MessageClass(content="foo", role="user") + assert msg3.name is None diff --git a/libs/core/tests/unit_tests/test_tools.py b/libs/core/tests/unit_tests/test_tools.py index d89f8b2657d66..001ebaa756c2e 100644 --- a/libs/core/tests/unit_tests/test_tools.py +++ b/libs/core/tests/unit_tests/test_tools.py @@ -738,7 +738,7 @@ def handling(e: ValidationError) -> str: ], ) def test_validation_error_handling_non_validation_error( - handler: Union[bool, str, Callable[[ValidationError], str]] + handler: Union[bool, str, Callable[[ValidationError], str]], ) -> None: """Test that validation errors are handled correctly.""" @@ -800,7 +800,7 @@ def handling(e: ValidationError) -> str: ], ) async def test_async_validation_error_handling_non_validation_error( - handler: Union[bool, str, Callable[[ValidationError], str]] + handler: Union[bool, str, Callable[[ValidationError], str]], ) -> None: """Test that validation errors are handled correctly.""" diff --git a/libs/core/tests/unit_tests/tracers/test_memory_stream.py b/libs/core/tests/unit_tests/tracers/test_memory_stream.py new file mode 100644 index 0000000000000..85d16986827ee --- /dev/null +++ b/libs/core/tests/unit_tests/tracers/test_memory_stream.py @@ -0,0 +1,122 @@ +import asyncio +import math +import time +from concurrent.futures import ThreadPoolExecutor +from typing import AsyncIterator + +from langchain_core.tracers.memory_stream import _MemoryStream + + +async def test_same_event_loop() -> None: + """Test that the memory stream works when the same event loop is used. + + This is the easy case. 
+ """ + reader_loop = asyncio.get_event_loop() + channel = _MemoryStream[dict](reader_loop) + writer = channel.get_send_stream() + reader = channel.get_receive_stream() + + async def producer() -> None: + """Produce items with slight delay.""" + tic = time.time() + for i in range(3): + await asyncio.sleep(0.10) + toc = time.time() + await writer.send( + { + "item": i, + "produce_time": toc - tic, + } + ) + await writer.aclose() + + async def consumer() -> AsyncIterator[dict]: + tic = time.time() + async for item in reader: + toc = time.time() + yield { + "receive_time": toc - tic, + **item, + } + + asyncio.create_task(producer()) + + items = [item async for item in consumer()] + + for item in items: + delta_time = item["receive_time"] - item["produce_time"] + # Allow a generous 10ms of delay + # The test is meant to verify that the producer and consumer are running in + # parallel despite the fact that the producer is running from another thread. + # abs_tol is used to allow for some delay in the producer and consumer + # due to overhead. + # To verify that the producer and consumer are running in parallel, we + # expect the delta_time to be smaller than the sleep delay in the producer + # * # of items = 30 ms + assert ( + math.isclose(delta_time, 0, abs_tol=0.010) is True + ), f"delta_time: {delta_time}" + + +async def test_queue_for_streaming_via_sync_call() -> None: + """Test via async -> sync -> async path.""" + reader_loop = asyncio.get_event_loop() + channel = _MemoryStream[dict](reader_loop) + writer = channel.get_send_stream() + reader = channel.get_receive_stream() + + async def producer() -> None: + """Produce items with slight delay.""" + tic = time.time() + for i in range(3): + await asyncio.sleep(0.10) + toc = time.time() + await writer.send( + { + "item": i, + "produce_time": toc - tic, + } + ) + await writer.aclose() + + def sync_call() -> None: + """Blocking sync call.""" + asyncio.run(producer()) + + async def consumer() -> AsyncIterator[dict]: + tic = time.time() + async for item in reader: + toc = time.time() + yield { + "receive_time": toc - tic, + **item, + } + + with ThreadPoolExecutor() as executor: + executor.submit(sync_call) + items = [item async for item in consumer()] + + for item in items: + delta_time = item["receive_time"] - item["produce_time"] + # Allow a generous 10ms of delay + # The test is meant to verify that the producer and consumer are running in + # parallel despite the fact that the producer is running from another thread. + # abs_tol is used to allow for some delay in the producer and consumer + # due to overhead. 
+ # To verify that the producer and consumer are running in parallel, we + # expect the delta_time to be smaller than the sleep delay in the producer + # * # of items = 30 ms + assert ( + math.isclose(delta_time, 0, abs_tol=0.010) is True + ), f"delta_time: {delta_time}" + + +async def test_closed_stream() -> None: + reader_loop = asyncio.get_event_loop() + channel = _MemoryStream[str](reader_loop) + writer = channel.get_send_stream() + reader = channel.get_receive_stream() + await writer.aclose() + + assert [chunk async for chunk in reader] == [] diff --git a/libs/core/tests/unit_tests/utils/test_utils.py b/libs/core/tests/unit_tests/utils/test_utils.py new file mode 100644 index 0000000000000..a69ea2510b41f --- /dev/null +++ b/libs/core/tests/unit_tests/utils/test_utils.py @@ -0,0 +1,104 @@ +import re +from contextlib import AbstractContextManager, nullcontext +from typing import Dict, Optional, Tuple, Type, Union +from unittest.mock import patch + +import pytest + +from langchain_core.utils import check_package_version +from langchain_core.utils._merge import merge_dicts + + +@pytest.mark.parametrize( + ("package", "check_kwargs", "actual_version", "expected"), + [ + ("stub", {"gt_version": "0.1"}, "0.1.2", None), + ("stub", {"gt_version": "0.1.2"}, "0.1.12", None), + ("stub", {"gt_version": "0.1.2"}, "0.1.2", (ValueError, "> 0.1.2")), + ("stub", {"gte_version": "0.1"}, "0.1.2", None), + ("stub", {"gte_version": "0.1.2"}, "0.1.2", None), + ], +) +def test_check_package_version( + package: str, + check_kwargs: Dict[str, Optional[str]], + actual_version: str, + expected: Optional[Tuple[Type[Exception], str]], +) -> None: + with patch("langchain_core.utils.utils.version", return_value=actual_version): + if expected is None: + check_package_version(package, **check_kwargs) + else: + with pytest.raises(expected[0], match=expected[1]): + check_package_version(package, **check_kwargs) + + +@pytest.mark.parametrize( + ("left", "right", "expected"), + ( + # Merge `None` and `1`. + ({"a": None}, {"a": 1}, {"a": 1}), + # Merge `1` and `None`. + ({"a": 1}, {"a": None}, {"a": 1}), + # Merge `None` and a value. + ({"a": None}, {"a": 0}, {"a": 0}), + ({"a": None}, {"a": "txt"}, {"a": "txt"}), + # Merge equal values. + ({"a": 1}, {"a": 1}, {"a": 1}), + ({"a": 1.5}, {"a": 1.5}, {"a": 1.5}), + ({"a": True}, {"a": True}, {"a": True}), + ({"a": False}, {"a": False}, {"a": False}), + ({"a": "txt"}, {"a": "txt"}, {"a": "txt"}), + ({"a": [1, 2]}, {"a": [1, 2]}, {"a": [1, 2]}), + ({"a": {"b": "txt"}}, {"a": {"b": "txt"}}, {"a": {"b": "txt"}}), + # Merge strings. + ({"a": "one"}, {"a": "two"}, {"a": "onetwo"}), + # Merge dicts. + ({"a": {"b": 1}}, {"a": {"c": 2}}, {"a": {"b": 1, "c": 2}}), + ( + {"function_call": {"arguments": None}}, + {"function_call": {"arguments": "{\n"}}, + {"function_call": {"arguments": "{\n"}}, + ), + # Merge lists. + ({"a": [1, 2]}, {"a": [3]}, {"a": [1, 2, 3]}), + ({"a": 1, "b": 2}, {"a": 1}, {"a": 1, "b": 2}), + ({"a": 1, "b": 2}, {"c": None}, {"a": 1, "b": 2, "c": None}), + # + # Invalid inputs. + # + ( + {"a": 1}, + {"a": "1"}, + pytest.raises( + TypeError, + match=re.escape( + 'additional_kwargs["a"] already exists in this message, ' + "but with a different type." + ), + ), + ), + ( + {"a": (1, 2)}, + {"a": (3,)}, + pytest.raises( + TypeError, + match=( + "Additional kwargs key a already exists in left dict and value " + "has unsupported type .+tuple.+." 
+ ), + ), + ), + ), +) +def test_merge_dicts( + left: dict, right: dict, expected: Union[dict, AbstractContextManager] +) -> None: + if isinstance(expected, AbstractContextManager): + err = expected + else: + err = nullcontext() + + with err: + actual = merge_dicts(left, right) + assert actual == expected diff --git a/libs/experimental/langchain_experimental/agents/agent_toolkits/pandas/base.py b/libs/experimental/langchain_experimental/agents/agent_toolkits/pandas/base.py index 0970c39595b70..6fa67dc60f8ca 100644 --- a/libs/experimental/langchain_experimental/agents/agent_toolkits/pandas/base.py +++ b/libs/experimental/langchain_experimental/agents/agent_toolkits/pandas/base.py @@ -10,7 +10,7 @@ RunnableAgent, RunnableMultiActionAgent, ) -from langchain.agents.mrkl.base import ZeroShotAgent +from langchain.agents.mrkl.prompt import FORMAT_INSTRUCTIONS from langchain.agents.openai_functions_agent.base import ( OpenAIFunctionsAgent, create_openai_functions_agent, @@ -18,7 +18,11 @@ from langchain_core.callbacks import BaseCallbackManager from langchain_core.language_models import LanguageModelLike from langchain_core.messages import SystemMessage -from langchain_core.prompts import BasePromptTemplate, ChatPromptTemplate +from langchain_core.prompts import ( + BasePromptTemplate, + ChatPromptTemplate, + PromptTemplate, +) from langchain_core.tools import BaseTool from langchain_core.utils.interactive_env import is_interactive_env @@ -43,7 +47,6 @@ def _get_multi_prompt( suffix: Optional[str] = None, include_df_in_prompt: Optional[bool] = True, number_of_head_rows: int = 5, - tools: Sequence[BaseTool] = (), ) -> BasePromptTemplate: if suffix is not None: suffix_to_use = suffix @@ -53,11 +56,8 @@ def _get_multi_prompt( suffix_to_use = SUFFIX_NO_DF prefix = prefix if prefix is not None else MULTI_DF_PREFIX - prompt = ZeroShotAgent.create_prompt( - tools, - prefix=prefix, - suffix=suffix_to_use, - ) + template = "\n\n".join([prefix, "{tools}", FORMAT_INSTRUCTIONS, suffix_to_use]) + prompt = PromptTemplate.from_template(template) partial_prompt = prompt.partial() if "dfs_head" in partial_prompt.input_variables: dfs_head = "\n\n".join([d.head(number_of_head_rows).to_markdown() for d in dfs]) @@ -74,7 +74,6 @@ def _get_single_prompt( suffix: Optional[str] = None, include_df_in_prompt: Optional[bool] = True, number_of_head_rows: int = 5, - tools: Sequence[BaseTool] = (), ) -> BasePromptTemplate: if suffix is not None: suffix_to_use = suffix @@ -84,11 +83,8 @@ def _get_single_prompt( suffix_to_use = SUFFIX_NO_DF prefix = prefix if prefix is not None else PREFIX - prompt = ZeroShotAgent.create_prompt( - tools, - prefix=prefix, - suffix=suffix_to_use, - ) + template = "\n\n".join([prefix, "{tools}", FORMAT_INSTRUCTIONS, suffix_to_use]) + prompt = PromptTemplate.from_template(template) partial_prompt = prompt.partial() if "df_head" in partial_prompt.input_variables: @@ -257,7 +253,6 @@ def create_pandas_dataframe_agent( suffix=suffix, include_df_in_prompt=include_df_in_prompt, number_of_head_rows=number_of_head_rows, - tools=tools, ) agent: Union[BaseSingleActionAgent, BaseMultiActionAgent] = RunnableAgent( runnable=create_react_agent(llm, tools, prompt), # type: ignore diff --git a/libs/experimental/langchain_experimental/data_anonymizer/presidio.py b/libs/experimental/langchain_experimental/data_anonymizer/presidio.py index f9b35788b08be..fbfad3703e731 100644 --- a/libs/experimental/langchain_experimental/data_anonymizer/presidio.py +++ 
b/libs/experimental/langchain_experimental/data_anonymizer/presidio.py @@ -428,7 +428,7 @@ def save_deanonymizer_mapping(self, file_path: Union[Path, str]) -> None: if save_path.suffix == ".json": with open(save_path, "w") as f: json.dump(self.deanonymizer_mapping, f, indent=2) - elif save_path.suffix == ".yaml": + elif save_path.suffix.endswith((".yaml", ".yml")): with open(save_path, "w") as f: yaml.dump(self.deanonymizer_mapping, f, default_flow_style=False) @@ -452,7 +452,7 @@ def load_deanonymizer_mapping(self, file_path: Union[Path, str]) -> None: if load_path.suffix == ".json": with open(load_path, "r") as f: loaded_mapping = json.load(f) - elif load_path.suffix == ".yaml": + elif load_path.suffix.endswith((".yaml", ".yml")): with open(load_path, "r") as f: loaded_mapping = yaml.load(f, Loader=yaml.FullLoader) diff --git a/libs/experimental/langchain_experimental/pal_chain/base.py b/libs/experimental/langchain_experimental/pal_chain/base.py index d30655ed37a52..58593076f4dbb 100644 --- a/libs/experimental/langchain_experimental/pal_chain/base.py +++ b/libs/experimental/langchain_experimental/pal_chain/base.py @@ -21,6 +21,16 @@ from langchain_experimental.pydantic_v1 import Extra, Field COMMAND_EXECUTION_FUNCTIONS = ["system", "exec", "execfile", "eval", "__import__"] +COMMAND_EXECUTION_ATTRIBUTES = [ + "__import__", + "__subclasses__", + "__builtins__", + "__globals__", + "__getattribute__", + "__bases__", + "__mro__", + "__base__", +] class PALValidation: @@ -232,6 +242,15 @@ def validate_code(cls, code: str, code_validations: PALValidation) -> None: or not code_validations.allow_imports ): for node in ast.walk(code_tree): + if ( + not code_validations.allow_command_exec + and isinstance(node, ast.Attribute) + and node.attr in COMMAND_EXECUTION_ATTRIBUTES + ): + raise ValueError( + f"Found illegal command execution function " + f"{node.attr} in code {code}" + ) if (not code_validations.allow_command_exec) and isinstance( node, ast.Call ): diff --git a/libs/experimental/langchain_experimental/prompts/load.py b/libs/experimental/langchain_experimental/prompts/load.py index a7322565778b2..51efdc98db5c5 100644 --- a/libs/experimental/langchain_experimental/prompts/load.py +++ b/libs/experimental/langchain_experimental/prompts/load.py @@ -30,7 +30,7 @@ def _load_prompt_from_file(file: Union[str, Path]) -> BasePromptTemplate: if file_path.suffix == ".json": with open(file_path) as f: config = json.load(f) - elif file_path.suffix == ".yaml": + elif file_path.suffix.endswith((".yaml", ".yml")): with open(file_path, "r") as f: config = yaml.safe_load(f) elif file_path.suffix == ".py": diff --git a/libs/experimental/langchain_experimental/recommenders/__init__.py b/libs/experimental/langchain_experimental/recommenders/__init__.py new file mode 100644 index 0000000000000..ec06f5541894d --- /dev/null +++ b/libs/experimental/langchain_experimental/recommenders/__init__.py @@ -0,0 +1,7 @@ +"""Amazon Personalize primitives.""" +from langchain_experimental.recommenders.amazon_personalize import AmazonPersonalize +from langchain_experimental.recommenders.amazon_personalize_chain import ( + AmazonPersonalizeChain, +) + +__all__ = ["AmazonPersonalize", "AmazonPersonalizeChain"] diff --git a/libs/experimental/langchain_experimental/recommenders/amazon_personalize.py b/libs/experimental/langchain_experimental/recommenders/amazon_personalize.py new file mode 100644 index 0000000000000..b2300f0a19c3a --- /dev/null +++ b/libs/experimental/langchain_experimental/recommenders/amazon_personalize.py @@ -0,0 
+1,195 @@ +from typing import Any, List, Mapping, Optional, Sequence + + +class AmazonPersonalize: + """Amazon Personalize Runtime wrapper for executing real-time operations: + https://docs.aws.amazon.com/personalize/latest/dg/API_Operations_Amazon_Personalize_Runtime.html + + Args: + campaign_arn: str, Optional: The Amazon Resource Name (ARN) of the campaign + to use for getting recommendations. + recommender_arn: str, Optional: The Amazon Resource Name (ARN) of the + recommender to use to get recommendations + client: Optional: boto3 client + credentials_profile_name: str, Optional :AWS profile name + region_name: str, Optional: AWS region, e.g., us-west-2 + + Example: + .. code-block:: python + + personalize_client = AmazonPersonalize ( + campaignArn='' ) + """ + + def __init__( + self, + campaign_arn: Optional[str] = None, + recommender_arn: Optional[str] = None, + client: Optional[Any] = None, + credentials_profile_name: Optional[str] = None, + region_name: Optional[str] = None, + ): + self.campaign_arn = campaign_arn + self.recommender_arn = recommender_arn + + if campaign_arn and recommender_arn: + raise ValueError( + "Cannot initialize AmazonPersonalize with both " + "campaign_arn and recommender_arn." + ) + + if not campaign_arn and not recommender_arn: + raise ValueError( + "Cannot initialize AmazonPersonalize. Provide one of " + "campaign_arn or recommender_arn" + ) + + try: + if client is not None: + self.client = client + else: + import boto3 + import botocore.config + + if credentials_profile_name is not None: + session = boto3.Session(profile_name=credentials_profile_name) + else: + # use default credentials + session = boto3.Session() + + client_params = {} + if region_name: + client_params["region_name"] = region_name + + service = "personalize-runtime" + session_config = botocore.config.Config(user_agent_extra="langchain") + client_params["config"] = session_config + self.client = session.client(service, **client_params) + + except ImportError: + raise ModuleNotFoundError( + "Could not import boto3 python package. " + "Please install it with `pip install boto3`." + ) + + def get_recommendations( + self, + user_id: Optional[str] = None, + item_id: Optional[str] = None, + filter_arn: Optional[str] = None, + filter_values: Optional[Mapping[str, str]] = None, + num_results: Optional[int] = 10, + context: Optional[Mapping[str, str]] = None, + promotions: Optional[Sequence[Mapping[str, Any]]] = None, + metadata_columns: Optional[Mapping[str, Sequence[str]]] = None, + **kwargs: Any, + ) -> Mapping[str, Any]: + """Get recommendations from Amazon Personalize: + https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetRecommendations.html + + Args: + user_id: str, Optional: The user identifier + for which to retrieve recommendations + item_id: str, Optional: The item identifier + for which to retrieve recommendations + filter_arn: str, Optional: The ARN of the filter + to apply to the returned recommendations + filter_values: Mapping, Optional: The values + to use when filtering recommendations. + num_results: int, Optional: Default=10: The number of results to return + context: Mapping, Optional: The contextual metadata + to use when getting recommendations + promotions: Sequence, Optional: The promotions + to apply to the recommendation request. + metadata_columns: Mapping, Optional: The metadata Columns to be returned + as part of the response. + + Returns: + response: Mapping[str, Any]: Returns an itemList and recommendationId. + + Example: + .. 
code-block:: python + + personalize_client = AmazonPersonalize(campaignArn='' )\n + response = personalize_client.get_recommendations(user_id="1") + + """ + if not user_id and not item_id: + raise ValueError("One of user_id or item_id is required") + + if filter_arn: + kwargs["filterArn"] = filter_arn + if filter_values: + kwargs["filterValues"] = filter_values + if user_id: + kwargs["userId"] = user_id + if num_results: + kwargs["numResults"] = num_results + if context: + kwargs["context"] = context + if promotions: + kwargs["promotions"] = promotions + if item_id: + kwargs["itemId"] = item_id + if metadata_columns: + kwargs["metadataColumns"] = metadata_columns + if self.campaign_arn: + kwargs["campaignArn"] = self.campaign_arn + if self.recommender_arn: + kwargs["recommenderArn"] = self.recommender_arn + + return self.client.get_recommendations(**kwargs) + + def get_personalized_ranking( + self, + user_id: str, + input_list: List[str], + filter_arn: Optional[str] = None, + filter_values: Optional[Mapping[str, str]] = None, + context: Optional[Mapping[str, str]] = None, + metadata_columns: Optional[Mapping[str, Sequence[str]]] = None, + **kwargs: Any, + ) -> Mapping[str, Any]: + """Re-ranks a list of recommended items for the given user. + https://docs.aws.amazon.com/personalize/latest/dg/API_RS_GetPersonalizedRanking.html + + Args: + user_id: str, Required: The user identifier + for which to retrieve recommendations + input_list: List[str], Required: A list of items (by itemId) to rank + filter_arn: str, Optional: The ARN of the filter to apply + filter_values: Mapping, Optional: The values to use + when filtering recommendations. + context: Mapping, Optional: The contextual metadata + to use when getting recommendations + metadata_columns: Mapping, Optional: The metadata Columns to be returned + as part of the response. + + Returns: + response: Mapping[str, Any]: Returns personalizedRanking + and recommendationId. + + Example: + .. code-block:: python + + personalize_client = AmazonPersonalize(campaignArn='' )\n + response = personalize_client.get_personalized_ranking(user_id="1", + input_list=["123", "256"]) + + """ + + if filter_arn: + kwargs["filterArn"] = filter_arn + if filter_values: + kwargs["filterValues"] = filter_values + if user_id: + kwargs["userId"] = user_id + if input_list: + kwargs["inputList"] = input_list + if context: + kwargs["context"] = context + if metadata_columns: + kwargs["metadataColumns"] = metadata_columns + kwargs["campaignArn"] = self.campaign_arn + + return self.client.get_personalized_ranking(**kwargs) diff --git a/libs/experimental/langchain_experimental/recommenders/amazon_personalize_chain.py b/libs/experimental/langchain_experimental/recommenders/amazon_personalize_chain.py new file mode 100644 index 0000000000000..4c187a8006463 --- /dev/null +++ b/libs/experimental/langchain_experimental/recommenders/amazon_personalize_chain.py @@ -0,0 +1,192 @@ +from __future__ import annotations + +from typing import Any, Dict, List, Mapping, Optional, cast + +from langchain.callbacks.manager import ( + CallbackManagerForChainRun, +) +from langchain.chains import LLMChain +from langchain.chains.base import Chain +from langchain.prompts.prompt import PromptTemplate +from langchain.schema.language_model import BaseLanguageModel + +from langchain_experimental.recommenders.amazon_personalize import AmazonPersonalize + +SUMMARIZE_PROMPT_QUERY = """ +Summarize the recommended items for a user from the items list in <result> tag below.
+Make correlation into the items in the list and provide a summary. + <result> + {result} + </result> +""" + +SUMMARIZE_PROMPT = PromptTemplate( + input_variables=["result"], template=SUMMARIZE_PROMPT_QUERY +) + +INTERMEDIATE_STEPS_KEY = "intermediate_steps" + +# Input Key Names to be used +USER_ID_INPUT_KEY = "user_id" +ITEM_ID_INPUT_KEY = "item_id" +INPUT_LIST_INPUT_KEY = "input_list" +FILTER_ARN_INPUT_KEY = "filter_arn" +FILTER_VALUES_INPUT_KEY = "filter_values" +CONTEXT_INPUT_KEY = "context" +PROMOTIONS_INPUT_KEY = "promotions" +METADATA_COLUMNS_INPUT_KEY = "metadata_columns" +RESULT_OUTPUT_KEY = "result" + + +class AmazonPersonalizeChain(Chain): + """Amazon Personalize Chain for retrieving recommendations + from Amazon Personalize, and summarizing + the recommendations in natural language. + It will only return recommendations if return_direct=True. + Can also be used in sequential chains for working with + the output of Amazon Personalize. + + Example: + .. code-block:: python + + chain = PersonalizeChain.from_llm(llm=agent_llm, client=personalize_lg, + return_direct=True)\n + response = chain.run({'user_id':'1'})\n + response = chain.run({'user_id':'1', 'item_id':'234'}) + """ + + client: AmazonPersonalize + summarization_chain: LLMChain + return_direct: bool = False + return_intermediate_steps: bool = False + is_ranking_recipe: bool = False + + @property + def input_keys(self) -> List[str]: + """This returns an empty list since there are only optional + input_keys and none is required. + + :meta private: + """ + return [] + + @property + def output_keys(self) -> List[str]: + """Will always return result key. + + :meta private: + """ + return [RESULT_OUTPUT_KEY] + + @classmethod + def from_llm( + cls, + llm: BaseLanguageModel, + client: AmazonPersonalize, + prompt_template: PromptTemplate = SUMMARIZE_PROMPT, + is_ranking_recipe: bool = False, + **kwargs: Any, + ) -> AmazonPersonalizeChain: + """Initializes the Personalize Chain with LLMAgent, Personalize Client, + Prompts to be used + + Args: + llm: BaseLanguageModel: The LLM to be used in the Chain + client: AmazonPersonalize: The client created to support + invoking AmazonPersonalize + prompt_template: PromptTemplate: The prompt template which can be + invoked with the output from Amazon Personalize + is_ranking_recipe: bool: default: False: specifies + if the trained recipe is USER_PERSONALIZED_RANKING + + Example: + .. code-block:: python + + chain = PersonalizeChain.from_llm(llm=agent_llm, + client=personalize_lg, return_direct=True)\n + response = chain.run({'user_id':'1'})\n + response = chain.run({'user_id':'1', 'item_id':'234'}) + + RANDOM_PROMPT_QUERY=" Summarize recommendations in {result}" + chain = PersonalizeChain.from_llm(llm=agent_llm, + client=personalize_lg, prompt_template=PROMPT_TEMPLATE)\n + """ + summarization_chain = LLMChain(llm=llm, prompt=prompt_template) + + return cls( + summarization_chain=summarization_chain, + client=client, + is_ranking_recipe=is_ranking_recipe, + **kwargs, + ) + + def _call( + self, + inputs: Mapping[str, Any], + run_manager: Optional[CallbackManagerForChainRun] = None, + ) -> Dict[str, Any]: + """Retrieves recommendations by invoking Amazon Personalize, + and invokes an LLM using the default/overridden + prompt template with the output from Amazon Personalize + + Args: + inputs: Mapping [str, Any] : Provide input identifiers in a map. + For example - {'user_id':'1'} or + {'user_id':'1', 'item_id':'123'}. You can also pass the + filter_arn, filter_values as an + input.
+ """ + _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager() + callbacks = _run_manager.get_child() + + user_id = inputs.get(USER_ID_INPUT_KEY) + item_id = inputs.get(ITEM_ID_INPUT_KEY) + input_list = inputs.get(INPUT_LIST_INPUT_KEY) + filter_arn = inputs.get(FILTER_ARN_INPUT_KEY) + filter_values = inputs.get(FILTER_VALUES_INPUT_KEY) + promotions = inputs.get(PROMOTIONS_INPUT_KEY) + context = inputs.get(CONTEXT_INPUT_KEY) + metadata_columns = inputs.get(METADATA_COLUMNS_INPUT_KEY) + + intermediate_steps: List = [] + intermediate_steps.append({"Calling Amazon Personalize"}) + + if self.is_ranking_recipe: + response = self.client.get_personalized_ranking( + user_id=str(user_id), + input_list=cast(List[str], input_list), + filter_arn=filter_arn, + filter_values=filter_values, + context=context, + metadata_columns=metadata_columns, + ) + else: + response = self.client.get_recommendations( + user_id=user_id, + item_id=item_id, + filter_arn=filter_arn, + filter_values=filter_values, + context=context, + promotions=promotions, + metadata_columns=metadata_columns, + ) + + _run_manager.on_text("Call to Amazon Personalize complete \n") + + if self.return_direct: + final_result = response + else: + result = self.summarization_chain( + {RESULT_OUTPUT_KEY: response}, callbacks=callbacks + ) + final_result = result[self.summarization_chain.output_key] + + intermediate_steps.append({"context": response}) + chain_result: Dict[str, Any] = {RESULT_OUTPUT_KEY: final_result} + if self.return_intermediate_steps: + chain_result[INTERMEDIATE_STEPS_KEY] = intermediate_steps + return chain_result + + @property + def _chain_type(self) -> str: + return "amazon_personalize_chain" diff --git a/libs/experimental/langchain_experimental/smart_llm/base.py b/libs/experimental/langchain_experimental/smart_llm/base.py index d9d8929cb07cb..d69fa208013a6 100644 --- a/libs/experimental/langchain_experimental/smart_llm/base.py +++ b/libs/experimental/langchain_experimental/smart_llm/base.py @@ -215,7 +215,7 @@ def get_prompt_strings( HumanMessagePromptTemplate, "You are a researcher tasked with investigating the " f"{self.n_ideas} response options provided. List the flaws and " - "faulty logic of each answer options. Let'w work this out in a step" + "faulty logic of each answer option. Let's work this out in a step" " by step way to be sure we have all the errors:", ), ] @@ -229,10 +229,10 @@ def get_prompt_strings( HumanMessagePromptTemplate, "You are a resolver tasked with 1) finding which of " f"the {self.n_ideas} answer options the researcher thought was " - "best,2) improving that answer and 3) printing the answer in full. " - "Don't output anything for step 1 or 2, only the full answer in 3. " - "Let's work this out in a step by step way to be sure we have " - "the right answer:", + "best, 2) improving that answer and 3) printing the answer in " + "full. Don't output anything for step 1 or 2, only the full " + "answer in 3. Let's work this out in a step by step way to " + "be sure we have the right answer:", ), ] ) diff --git a/libs/experimental/poetry.lock b/libs/experimental/poetry.lock index 4e75e3d6904b1..f575216ce2b9e 100644 --- a/libs/experimental/poetry.lock +++ b/libs/experimental/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. 
[[package]] name = "aiohttp" @@ -1328,7 +1328,6 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, ] [[package]] @@ -1642,7 +1641,7 @@ files = [ [[package]] name = "langchain" -version = "0.1.5" +version = "0.1.7" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -1654,9 +1653,9 @@ aiohttp = "^3.8.3" async-timeout = {version = "^4.0.0", markers = "python_version < \"3.11\""} dataclasses-json = ">= 0.5.7, < 0.7" jsonpatch = "^1.33" -langchain-community = ">=0.0.17,<0.1" -langchain-core = ">=0.1.16,<0.2" -langsmith = ">=0.0.83,<0.1" +langchain-community = ">=0.0.20,<0.1" +langchain-core = ">=0.1.22,<0.2" +langsmith = "^0.1.0" numpy = "^1" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -1685,7 +1684,7 @@ url = "../langchain" [[package]] name = "langchain-community" -version = "0.0.17" +version = "0.0.20" description = "Community contributed LangChain integrations." optional = false python-versions = ">=3.8.1,<4.0" @@ -1695,8 +1694,8 @@ develop = true [package.dependencies] aiohttp = "^3.8.3" dataclasses-json = ">= 0.5.7, < 0.7" -langchain-core = ">=0.1.16,<0.2" -langsmith = ">=0.0.83,<0.1" +langchain-core = ">=0.1.21,<0.2" +langsmith = "^0.1.0" numpy = "^1" PyYAML = ">=5.3" requests = "^2" @@ -1705,7 +1704,7 @@ tenacity = "^8.1.0" [package.extras] cli = ["typer (>=0.9.0,<0.10.0)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "dashvector (>=1.0.1,<2.0.0)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt 
(>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] [package.source] type = "directory" @@ -1713,7 +1712,7 @@ url = "../community" [[package]] name = "langchain-core" -version = "0.1.18" +version = "0.1.23" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -1723,7 +1722,7 @@ develop = true [package.dependencies] anyio = ">=3,<5" jsonpatch = "^1.33" -langsmith = ">=0.0.83,<0.1" +langsmith = "^0.1.0" packaging = "^23.2" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -1753,13 +1752,13 @@ data = ["language-data (>=1.1,<2.0)"] [[package]] name = "langsmith" -version = "0.0.83" +version = "0.1.1" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.83-py3-none-any.whl", hash = "sha256:a5bb7ac58c19a415a9d5f51db56dd32ee2cd7343a00825bbc2018312eb3d122a"}, - {file = "langsmith-0.0.83.tar.gz", hash = "sha256:94427846b334ad9bdbec3266fee12903fe9f5448f628667689d0412012aaf392"}, + {file = "langsmith-0.1.1-py3-none-any.whl", hash = "sha256:10ff2b977a41e3f6351d1a4239d9bd57af0547aa909e839d2791e16cc197a6f9"}, + {file = "langsmith-0.1.1.tar.gz", hash = "sha256:09df0c2ca9085105f97a4e4f281b083e312c99d162f3fe2b2d5eefd5c3692e60"}, ] [package.dependencies] @@ -1793,16 +1792,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2317,6 +2306,73 @@ files = [ {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, ] +[[package]] +name = "pandas" +version = "2.0.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pandas-2.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:e4c7c9f27a4185304c7caf96dc7d91bc60bc162221152de697c98eb0b2648dd8"}, + {file = "pandas-2.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f167beed68918d62bffb6ec64f2e1d8a7d297a038f86d4aed056b9493fca407f"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce0c6f76a0f1ba361551f3e6dceaff06bde7514a374aa43e33b588ec10420183"}, + {file = "pandas-2.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba619e410a21d8c387a1ea6e8a0e49bb42216474436245718d7f2e88a2f8d7c0"}, + {file = "pandas-2.0.3-cp310-cp310-win32.whl", hash = "sha256:3ef285093b4fe5058eefd756100a367f27029913760773c8bf1d2d8bebe5d210"}, + {file = "pandas-2.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:9ee1a69328d5c36c98d8e74db06f4ad518a1840e8ccb94a4ba86920986bb617e"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b084b91d8d66ab19f5bb3256cbd5ea661848338301940e17f4492b2ce0801fe8"}, + {file = "pandas-2.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:37673e3bdf1551b95bf5d4ce372b37770f9529743d2498032439371fc7b7eb26"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9cb1e14fdb546396b7e1b923ffaeeac24e4cedd14266c3497216dd4448e4f2d"}, + {file = "pandas-2.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9cd88488cceb7635aebb84809d087468eb33551097d600c6dad13602029c2df"}, + {file = "pandas-2.0.3-cp311-cp311-win32.whl", hash = "sha256:694888a81198786f0e164ee3a581df7d505024fbb1f15202fc7db88a71d84ebd"}, + {file = "pandas-2.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:6a21ab5c89dcbd57f78d0ae16630b090eec626360085a4148693def5452d8a6b"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9e4da0d45e7f34c069fe4d522359df7d23badf83abc1d1cef398895822d11061"}, + {file = "pandas-2.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:32fca2ee1b0d93dd71d979726b12b61faa06aeb93cf77468776287f41ff8fdc5"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:258d3624b3ae734490e4d63c430256e716f488c4fcb7c8e9bde2d3aa46c29089"}, + {file = "pandas-2.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eae3dc34fa1aa7772dd3fc60270d13ced7346fcbcfee017d3132ec625e23bb0"}, + {file = "pandas-2.0.3-cp38-cp38-win32.whl", hash = "sha256:f3421a7afb1a43f7e38e82e844e2bca9a6d793d66c1a7f9f0ff39a795bbc5e02"}, + {file = "pandas-2.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:69d7f3884c95da3a31ef82b7618af5710dba95bb885ffab339aad925c3e8ce78"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5247fb1ba347c1261cbbf0fcfba4a3121fbb4029d95d9ef4dc45406620b25c8b"}, + {file = "pandas-2.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:81af086f4543c9d8bb128328b5d32e9986e0c84d3ee673a2ac6fb57fd14f755e"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1994c789bf12a7c5098277fb43836ce090f1073858c10f9220998ac74f37c69b"}, + {file = "pandas-2.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ec591c48e29226bcbb316e0c1e9423622bc7a4eaf1ef7c3c9fa1a3981f89641"}, + {file = "pandas-2.0.3-cp39-cp39-win32.whl", hash = "sha256:04dbdbaf2e4d46ca8da896e1805bc04eb85caa9a82e259e8eed00254d5e0c682"}, + {file = "pandas-2.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:1168574b036cd8b93abc746171c9b4f1b83467438a5e45909fed645cf8692dbc"}, + {file = "pandas-2.0.3.tar.gz", hash = 
"sha256:c02f372a88e0d17f36d3093a644c73cfc1788e876a7c4bcb4020a77512e2043c"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.20.3", markers = "python_version < \"3.10\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.1)", "SQLAlchemy (>=1.4.16)", "beautifulsoup4 (>=4.9.3)", "bottleneck (>=1.3.2)", "brotlipy (>=0.7.0)", "fastparquet (>=0.6.3)", "fsspec (>=2021.07.0)", "gcsfs (>=2021.07.0)", "html5lib (>=1.1)", "hypothesis (>=6.34.2)", "jinja2 (>=3.0.0)", "lxml (>=4.6.3)", "matplotlib (>=3.6.1)", "numba (>=0.53.1)", "numexpr (>=2.7.3)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pandas-gbq (>=0.15.0)", "psycopg2 (>=2.8.6)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "python-snappy (>=0.6.0)", "pyxlsb (>=1.0.8)", "qtpy (>=2.2.0)", "s3fs (>=2021.08.0)", "scipy (>=1.7.1)", "tables (>=3.6.1)", "tabulate (>=0.8.9)", "xarray (>=0.21.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)", "zstandard (>=0.15.2)"] +aws = ["s3fs (>=2021.08.0)"] +clipboard = ["PyQt5 (>=5.15.1)", "qtpy (>=2.2.0)"] +compression = ["brotlipy (>=0.7.0)", "python-snappy (>=0.6.0)", "zstandard (>=0.15.2)"] +computation = ["scipy (>=1.7.1)", "xarray (>=0.21.0)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.7)", "pyxlsb (>=1.0.8)", "xlrd (>=2.0.1)", "xlsxwriter (>=1.4.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2021.07.0)"] +gcp = ["gcsfs (>=2021.07.0)", "pandas-gbq (>=0.15.0)"] +hdf5 = ["tables (>=3.6.1)"] +html = ["beautifulsoup4 (>=4.9.3)", "html5lib (>=1.1)", "lxml (>=4.6.3)"] +mysql = ["SQLAlchemy (>=1.4.16)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.0.0)", "tabulate (>=0.8.9)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.2)", "numba (>=0.53.1)", "numexpr (>=2.7.1)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.16)", "psycopg2 (>=2.8.6)"] +spss = ["pyreadstat (>=1.1.2)"] +sql-other = ["SQLAlchemy (>=1.4.16)"] +test = ["hypothesis (>=6.34.2)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.6.3)"] + [[package]] name = "pandocfilters" version = "1.5.0" @@ -2978,7 +3034,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -2986,15 +3041,8 @@ files = [ {file = 
"PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -3011,7 +3059,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -3019,7 +3066,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -4017,7 +4063,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\""} +greenlet = {version = "!=0.4.17", markers = "platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\""} typing-extensions = ">=4.2.0" [package.extras] @@ -4124,6 +4170,20 @@ files = [ [package.dependencies] mpmath = ">=0.19" +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = true +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + [[package]] name = "tenacity" version = "8.2.3" @@ -4701,6 +4761,17 @@ files = [ mypy-extensions = ">=0.3.0" typing-extensions = ">=3.7.4" +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = true +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + [[package]] name = "uri-template" version = "1.3.0" @@ -4962,9 +5033,9 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.link testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [extras] -extended-testing = ["faker", "jinja2", "presidio-analyzer", "presidio-anonymizer", "sentence-transformers", "vowpal-wabbit-next"] +extended-testing = ["faker", "jinja2", "pandas", "presidio-analyzer", "presidio-anonymizer", "sentence-transformers", "tabulate", "vowpal-wabbit-next"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "11321a2551a49e9cb432348f03e991d1f13368624b1e4df5a721b794114b4f02" +content-hash = "0e4b297b0a8c595fbfe1e8a00d5a13057b1bdd4a0ce08d415ca4c4a7712cee88" diff --git a/libs/experimental/pyproject.toml 
b/libs/experimental/pyproject.toml index b3386cc600c35..be3246167aac8 100644 --- a/libs/experimental/pyproject.toml +++ b/libs/experimental/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain-experimental" -version = "0.0.50" +version = "0.0.51" description = "Building applications with LLMs through composability" authors = [] license = "MIT" @@ -18,6 +18,8 @@ faker = {version = "^19.3.1", optional = true} vowpal-wabbit-next = {version = "0.6.0", optional = true} sentence-transformers = {version = "^2", optional = true} jinja2 = {version = "^3", optional = true} +pandas = { version = "^2.0.1", optional = true } +tabulate = {version = "^0.9.0", optional = true} [tool.poetry.group.lint] optional = true @@ -79,6 +81,8 @@ extended_testing = [ "vowpal-wabbit-next", "sentence-transformers", "jinja2", + "pandas", + "tabulate", ] [tool.ruff.lint] diff --git a/libs/experimental/tests/unit_tests/agents/__init__.py b/libs/experimental/tests/unit_tests/agents/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/experimental/tests/unit_tests/agents/agent_toolkits/__init__.py b/libs/experimental/tests/unit_tests/agents/agent_toolkits/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/experimental/tests/unit_tests/agents/agent_toolkits/pandas/__init__.py b/libs/experimental/tests/unit_tests/agents/agent_toolkits/pandas/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/experimental/tests/unit_tests/agents/agent_toolkits/pandas/test_base.py b/libs/experimental/tests/unit_tests/agents/agent_toolkits/pandas/test_base.py new file mode 100644 index 0000000000000..8f3a063f6881e --- /dev/null +++ b/libs/experimental/tests/unit_tests/agents/agent_toolkits/pandas/test_base.py @@ -0,0 +1,15 @@ +import sys + +import pytest + +from langchain_experimental.agents import create_pandas_dataframe_agent +from tests.unit_tests.fake_llm import FakeLLM + + +@pytest.mark.requires("pandas", "tabulate") +@pytest.mark.skipif(sys.version_info < (3, 9), reason="requires python3.9 or higher") +def test_create_pandas_dataframe_agent() -> None: + import pandas as pd + + create_pandas_dataframe_agent(FakeLLM(), pd.DataFrame()) + create_pandas_dataframe_agent(FakeLLM(), [pd.DataFrame(), pd.DataFrame()]) diff --git a/libs/langchain/README.md b/libs/langchain/README.md index 2c8d69bcb4919..bd16bdf6e6fc8 100644 --- a/libs/langchain/README.md +++ b/libs/langchain/README.md @@ -20,7 +20,7 @@ Looking for the JS/TS version? Check out [LangChain.js](https://github.com/langc To help you ship LangChain apps to production faster, check out [LangSmith](https://smith.langchain.com). [LangSmith](https://smith.langchain.com) is a unified developer platform for building, testing, and monitoring LLM applications. -Fill out [this form](https://airtable.com/appwQzlErAS2qiP0L/shrGtGaVBVAz7NcV2) to get off the waitlist or speak with our sales team +Fill out [this form](https://www.langchain.com/contact-sales) to speak with our sales team. 
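The libs/experimental changes above add optional pandas and tabulate extras and a unit test for create_pandas_dataframe_agent. A minimal usage sketch, assuming a real chat model is available (the langchain_openai import, model settings, and CSV path below are illustrative placeholders, not part of this diff):

```python
# Sketch only: mirrors the new unit test, but with a real LLM instead of FakeLLM.
import pandas as pd

from langchain_experimental.agents import create_pandas_dataframe_agent
from langchain_openai import ChatOpenAI  # assumed to be installed separately

df = pd.read_csv("titanic.csv")  # any DataFrame (or list of DataFrames) works
agent = create_pandas_dataframe_agent(ChatOpenAI(temperature=0), df, verbose=True)
agent.invoke({"input": "How many rows are in the dataframe?"})
```

Both pandas and tabulate must be installed (they are only extras here), which is exactly what the `@pytest.mark.requires("pandas", "tabulate")` marker on the new test enforces.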
## Quick Install diff --git a/libs/langchain/langchain/agents/agent.py b/libs/langchain/langchain/agents/agent.py index 8bd7b6d478ab5..e6f9df4939853 100644 --- a/libs/langchain/langchain/agents/agent.py +++ b/libs/langchain/langchain/agents/agent.py @@ -185,7 +185,7 @@ def save(self, file_path: Union[Path, str]) -> None: if save_path.suffix == ".json": with open(file_path, "w") as f: json.dump(agent_dict, f, indent=4) - elif save_path.suffix == ".yaml": + elif save_path.suffix.endswith((".yaml", ".yml")): with open(file_path, "w") as f: yaml.dump(agent_dict, f, default_flow_style=False) else: @@ -310,7 +310,7 @@ def save(self, file_path: Union[Path, str]) -> None: if save_path.suffix == ".json": with open(file_path, "w") as f: json.dump(agent_dict, f, indent=4) - elif save_path.suffix == ".yaml": + elif save_path.suffix.endswith((".yaml", ".yml")): with open(file_path, "w") as f: yaml.dump(agent_dict, f, default_flow_style=False) else: diff --git a/libs/langchain/langchain/chains/__init__.py b/libs/langchain/langchain/chains/__init__.py index 2b7ba6ac256bb..3a34b82ed512b 100644 --- a/libs/langchain/langchain/chains/__init__.py +++ b/libs/langchain/langchain/chains/__init__.py @@ -41,6 +41,7 @@ from langchain.chains.graph_qa.kuzu import KuzuQAChain from langchain.chains.graph_qa.nebulagraph import NebulaGraphQAChain from langchain.chains.graph_qa.neptune_cypher import NeptuneOpenCypherQAChain +from langchain.chains.graph_qa.neptune_sparql import NeptuneSparqlQAChain from langchain.chains.graph_qa.ontotext_graphdb import OntotextGraphDBQAChain from langchain.chains.graph_qa.sparql import GraphSparqlQAChain from langchain.chains.history_aware_retriever import create_history_aware_retriever @@ -81,6 +82,7 @@ ) from langchain.chains.sequential import SequentialChain, SimpleSequentialChain from langchain.chains.sql_database.query import create_sql_query_chain +from langchain.chains.structured_output import create_structured_output_runnable from langchain.chains.summarize import load_summarize_chain from langchain.chains.transform import TransformChain @@ -116,6 +118,7 @@ "NatBotChain", "NebulaGraphQAChain", "NeptuneOpenCypherQAChain", + "NeptuneSparqlQAChain", "OpenAIModerationChain", "OpenAPIEndpointChain", "QAGenerationChain", @@ -143,5 +146,6 @@ "create_sql_query_chain", "create_retrieval_chain", "create_history_aware_retriever", + "create_structured_output_runnable", "load_summarize_chain", ] diff --git a/libs/langchain/langchain/chains/base.py b/libs/langchain/langchain/chains/base.py index dc665ae9978c4..5c5bd1aadfa0c 100644 --- a/libs/langchain/langchain/chains/base.py +++ b/libs/langchain/langchain/chains/base.py @@ -146,24 +146,28 @@ def invoke( self.metadata, ) new_arg_supported = inspect.signature(self._call).parameters.get("run_manager") + run_manager = callback_manager.on_chain_start( dumpd(self), inputs, name=run_name, ) try: + self._validate_inputs(inputs) outputs = ( self._call(inputs, run_manager=run_manager) if new_arg_supported else self._call(inputs) ) + + final_outputs: Dict[str, Any] = self.prep_outputs( + inputs, outputs, return_only_outputs + ) except BaseException as e: run_manager.on_chain_error(e) raise e run_manager.on_chain_end(outputs) - final_outputs: Dict[str, Any] = self.prep_outputs( - inputs, outputs, return_only_outputs - ) + if include_run_info: final_outputs[RUN_KEY] = RunInfo(run_id=run_manager.run_id) return final_outputs @@ -199,18 +203,20 @@ async def ainvoke( name=run_name, ) try: + self._validate_inputs(inputs) outputs = ( await self._acall(inputs, 
run_manager=run_manager) if new_arg_supported else await self._acall(inputs) ) + final_outputs: Dict[str, Any] = self.prep_outputs( + inputs, outputs, return_only_outputs + ) except BaseException as e: await run_manager.on_chain_error(e) raise e await run_manager.on_chain_end(outputs) - final_outputs: Dict[str, Any] = self.prep_outputs( - inputs, outputs, return_only_outputs - ) + if include_run_info: final_outputs[RUN_KEY] = RunInfo(run_id=run_manager.run_id) return final_outputs @@ -259,6 +265,20 @@ def output_keys(self) -> List[str]: def _validate_inputs(self, inputs: Dict[str, Any]) -> None: """Check that all inputs are present.""" + if not isinstance(inputs, dict): + _input_keys = set(self.input_keys) + if self.memory is not None: + # If there are multiple input keys, but some get set by memory so that + # only one is not set, we can still figure out which key it is. + _input_keys = _input_keys.difference(self.memory.memory_variables) + if len(_input_keys) != 1: + raise ValueError( + f"A single string input was passed in, but this chain expects " + f"multiple inputs ({_input_keys}). When a chain expects " + f"multiple inputs, please call it by passing in a dictionary, " + "eg `chain({'foo': 1, 'bar': 2})`" + ) + missing_keys = set(self.input_keys).difference(inputs) if missing_keys: raise ValueError(f"Missing some input keys: {missing_keys}") @@ -444,7 +464,7 @@ def prep_outputs( return {**inputs, **outputs} def prep_inputs(self, inputs: Union[Dict[str, Any], Any]) -> Dict[str, str]: - """Validate and prepare chain inputs, including adding inputs from memory. + """Prepare chain inputs, including adding inputs from memory. Args: inputs: Dictionary of raw inputs, or single input if chain expects @@ -461,18 +481,10 @@ def prep_inputs(self, inputs: Union[Dict[str, Any], Any]) -> Dict[str, str]: # If there are multiple input keys, but some get set by memory so that # only one is not set, we can still figure out which key it is. _input_keys = _input_keys.difference(self.memory.memory_variables) - if len(_input_keys) != 1: - raise ValueError( - f"A single string input was passed in, but this chain expects " - f"multiple inputs ({_input_keys}). When a chain expects " - f"multiple inputs, please call it by passing in a dictionary, " - "eg `chain({'foo': 1, 'bar': 2})`" - ) inputs = {list(_input_keys)[0]: inputs} if self.memory is not None: external_context = self.memory.load_memory_variables(inputs) inputs = dict(inputs, **external_context) - self._validate_inputs(inputs) return inputs @property @@ -687,7 +699,7 @@ def save(self, file_path: Union[Path, str]) -> None: if save_path.suffix == ".json": with open(file_path, "w") as f: json.dump(chain_dict, f, indent=4) - elif save_path.suffix == ".yaml": + elif save_path.suffix.endswith((".yaml", ".yml")): with open(file_path, "w") as f: yaml.dump(chain_dict, f, default_flow_style=False) else: diff --git a/libs/langchain/langchain/chains/combine_documents/reduce.py b/libs/langchain/langchain/chains/combine_documents/reduce.py index 00e3efd02e076..a55c067d1680f 100644 --- a/libs/langchain/langchain/chains/combine_documents/reduce.py +++ b/libs/langchain/langchain/chains/combine_documents/reduce.py @@ -200,6 +200,10 @@ class ReduceDocumentsChain(BaseCombineDocumentsChain): """The maximum number of tokens to group documents into. 
For example, if set to 3000 then documents will be grouped into chunks of no greater than 3000 tokens before trying to combine them into a smaller chunk.""" + collapse_max_retries: Optional[int] = None + """The maximum number of retries to collapse documents to fit token_max. + If None, it will keep trying to collapse documents to fit token_max. + Otherwise, after it reaches the max number, it will throw an error.""" class Config: """Configuration for this pydantic object.""" @@ -289,6 +293,7 @@ def _collapse_docs_func(docs: List[Document], **kwargs: Any) -> str: ) _token_max = token_max or self.token_max + retries: int = 0 while num_tokens is not None and num_tokens > _token_max: new_result_doc_list = split_list_of_docs( result_docs, length_func, _token_max, **kwargs @@ -298,6 +303,12 @@ def _collapse_docs_func(docs: List[Document], **kwargs: Any) -> str: new_doc = collapse_docs(docs, _collapse_docs_func, **kwargs) result_docs.append(new_doc) num_tokens = length_func(result_docs, **kwargs) + retries += 1 + if self.collapse_max_retries and retries == self.collapse_max_retries: + raise ValueError( + f"Exceeded {self.collapse_max_retries} tries to \ + collapse documents to {_token_max} tokens." + ) return result_docs, {} async def _acollapse( @@ -317,6 +328,7 @@ async def _collapse_docs_func(docs: List[Document], **kwargs: Any) -> str: ) _token_max = token_max or self.token_max + retries: int = 0 while num_tokens is not None and num_tokens > _token_max: new_result_doc_list = split_list_of_docs( result_docs, length_func, _token_max, **kwargs @@ -326,6 +338,12 @@ async def _collapse_docs_func(docs: List[Document], **kwargs: Any) -> str: new_doc = await acollapse_docs(docs, _collapse_docs_func, **kwargs) result_docs.append(new_doc) num_tokens = length_func(result_docs, **kwargs) + retries += 1 + if self.collapse_max_retries and retries == self.collapse_max_retries: + raise ValueError( + f"Exceeded {self.collapse_max_retries} tries to \ + collapse documents to {_token_max} tokens." + ) return result_docs, {} @property diff --git a/libs/langchain/langchain/chains/graph_qa/neptune_sparql.py b/libs/langchain/langchain/chains/graph_qa/neptune_sparql.py new file mode 100644 index 0000000000000..08a1cc249beed --- /dev/null +++ b/libs/langchain/langchain/chains/graph_qa/neptune_sparql.py @@ -0,0 +1,196 @@ +""" +Question answering over an RDF or OWL graph using SPARQL. +""" +from __future__ import annotations + +from typing import Any, Dict, List, Optional + +from langchain_community.graphs import NeptuneRdfGraph +from langchain_core.language_models import BaseLanguageModel +from langchain_core.prompts.base import BasePromptTemplate +from langchain_core.prompts.prompt import PromptTemplate +from langchain_core.pydantic_v1 import Field + +from langchain.callbacks.manager import CallbackManagerForChainRun +from langchain.chains.base import Chain +from langchain.chains.graph_qa.prompts import SPARQL_QA_PROMPT +from langchain.chains.llm import LLMChain + +INTERMEDIATE_STEPS_KEY = "intermediate_steps" + +SPARQL_GENERATION_TEMPLATE = """ +Task: Generate a SPARQL SELECT statement for querying a graph database. +For instance, to find all email addresses of John Doe, the following +query in backticks would be suitable: +``` +PREFIX foaf: <http://xmlns.com/foaf/0.1/> +SELECT ?email +WHERE {{ + ?person foaf:name "John Doe" . + ?person foaf:mbox ?email . +}} +``` +Instructions: +Use only the node types and properties provided in the schema. +Do not use any node types and properties that are not explicitly provided. +Include all necessary prefixes.
+ +Examples: + +Schema: +{schema} +Note: Be as concise as possible. +Do not include any explanations or apologies in your responses. +Do not respond to any questions that ask for anything else than +for you to construct a SPARQL query. +Do not include any text except the SPARQL query generated. + +The question is: +{prompt}""" + +SPARQL_GENERATION_PROMPT = PromptTemplate( + input_variables=["schema", "prompt"], template=SPARQL_GENERATION_TEMPLATE +) + + +def extract_sparql(query: str) -> str: + query = query.strip() + querytoks = query.split("```") + if len(querytoks) == 3: + query = querytoks[1] + + if query.startswith("sparql"): + query = query[6:] + elif query.startswith("<sparql>") and query.endswith("</sparql>"): + query = query[8:-9] + return query + + +class NeptuneSparqlQAChain(Chain): + """Chain for question-answering against a Neptune graph + by generating SPARQL statements. + + *Security note*: Make sure that the database connection uses credentials + that are narrowly-scoped to only include necessary permissions. + Failure to do so may result in data corruption or loss, since the calling + code may attempt commands that would result in deletion, mutation + of data if appropriately prompted or reading sensitive data if such + data is present in the database. + The best way to guard against such negative outcomes is to (as appropriate) + limit the permissions granted to the credentials used with this tool. + + See https://python.langchain.com/docs/security for more information. + + Example: + .. code-block:: python + + chain = NeptuneSparqlQAChain.from_llm( + llm=llm, + graph=graph + ) + response = chain.invoke(query) + """ + + graph: NeptuneRdfGraph = Field(exclude=True) + sparql_generation_chain: LLMChain + qa_chain: LLMChain + input_key: str = "query" #: :meta private: + output_key: str = "result" #: :meta private: + top_k: int = 10 + return_intermediate_steps: bool = False + """Whether or not to return the intermediate steps along with the final answer.""" + return_direct: bool = False + """Whether or not to return the result of querying the graph directly.""" + extra_instructions: Optional[str] = None + """Extra instructions to be appended to the query generation prompt.""" + + @property + def input_keys(self) -> List[str]: + return [self.input_key] + + @property + def output_keys(self) -> List[str]: + _output_keys = [self.output_key] + return _output_keys + + @classmethod + def from_llm( + cls, + llm: BaseLanguageModel, + *, + qa_prompt: BasePromptTemplate = SPARQL_QA_PROMPT, + sparql_prompt: BasePromptTemplate = SPARQL_GENERATION_PROMPT, + examples: Optional[str] = None, + **kwargs: Any, + ) -> NeptuneSparqlQAChain: + """Initialize from LLM.""" + qa_chain = LLMChain(llm=llm, prompt=qa_prompt) + template_to_use = SPARQL_GENERATION_TEMPLATE + if examples: + template_to_use = template_to_use.replace( + "Examples:", "Examples: " + examples + ) + sparql_prompt = PromptTemplate( + input_variables=["schema", "prompt"], template=template_to_use + ) + sparql_generation_chain = LLMChain(llm=llm, prompt=sparql_prompt) + + return cls( + qa_chain=qa_chain, + sparql_generation_chain=sparql_generation_chain, + examples=examples, + **kwargs, + ) + + def _call( + self, + inputs: Dict[str, Any], + run_manager: Optional[CallbackManagerForChainRun] = None, + ) -> Dict[str, str]: + """ + Generate SPARQL query, use it to retrieve a response from the gdb and answer + the question.
+ """ + _run_manager = run_manager or CallbackManagerForChainRun.get_noop_manager() + callbacks = _run_manager.get_child() + prompt = inputs[self.input_key] + + intermediate_steps: List = [] + + generated_sparql = self.sparql_generation_chain.run( + {"prompt": prompt, "schema": self.graph.get_schema}, callbacks=callbacks + ) + + # Extract SPARQL + generated_sparql = extract_sparql(generated_sparql) + + _run_manager.on_text("Generated SPARQL:", end="\n", verbose=self.verbose) + _run_manager.on_text( + generated_sparql, color="green", end="\n", verbose=self.verbose + ) + + intermediate_steps.append({"query": generated_sparql}) + + context = self.graph.query(generated_sparql) + + if self.return_direct: + final_result = context + else: + _run_manager.on_text("Full Context:", end="\n", verbose=self.verbose) + _run_manager.on_text( + str(context), color="green", end="\n", verbose=self.verbose + ) + + intermediate_steps.append({"context": context}) + + result = self.qa_chain( + {"prompt": prompt, "context": context}, + callbacks=callbacks, + ) + final_result = result[self.qa_chain.output_key] + + chain_result: Dict[str, Any] = {self.output_key: final_result} + if self.return_intermediate_steps: + chain_result[INTERMEDIATE_STEPS_KEY] = intermediate_steps + + return chain_result diff --git a/libs/langchain/langchain/chains/graph_qa/prompts.py b/libs/langchain/langchain/chains/graph_qa/prompts.py index 5d4652453b619..d83aef9a6224e 100644 --- a/libs/langchain/langchain/chains/graph_qa/prompts.py +++ b/libs/langchain/langchain/chains/graph_qa/prompts.py @@ -100,6 +100,13 @@ The information part contains the provided information that you must use to construct an answer. The provided information is authoritative, you must never doubt it or try to use your internal knowledge to correct it. Make the answer sound as a response to the question. Do not mention that you based the result on the given information. +Here is an example: + +Question: Which managers own Neo4j stocks? +Context:[manager:CTL LLC, manager:JANE STREET GROUP LLC] +Helpful Answer: CTL LLC, JANE STREET GROUP LLC owns Neo4j stocks. + +Follow this example when generating answers. If the provided information is empty, say that you don't know the answer. 
Information: {context} diff --git a/libs/langchain/langchain/chains/loading.py b/libs/langchain/langchain/chains/loading.py index 0675b87141ac7..0fc3da3f499ae 100644 --- a/libs/langchain/langchain/chains/loading.py +++ b/libs/langchain/langchain/chains/loading.py @@ -607,7 +607,7 @@ def _load_chain_from_file(file: Union[str, Path], **kwargs: Any) -> Chain: if file_path.suffix == ".json": with open(file_path) as f: config = json.load(f) - elif file_path.suffix == ".yaml": + elif file_path.suffix.endswith((".yaml", ".yml")): with open(file_path, "r") as f: config = yaml.safe_load(f) else: diff --git a/libs/langchain/langchain/chains/structured_output/base.py b/libs/langchain/langchain/chains/structured_output/base.py index d825c1fe7c896..965f215ecd39b 100644 --- a/libs/langchain/langchain/chains/structured_output/base.py +++ b/libs/langchain/langchain/chains/structured_output/base.py @@ -22,7 +22,7 @@ def create_openai_fn_runnable( functions: Sequence[Union[Dict[str, Any], Type[BaseModel], Callable]], llm: Runnable, - prompt: BasePromptTemplate, + prompt: Optional[BasePromptTemplate] = None, *, enforce_single_function_usage: bool = True, output_parser: Optional[Union[BaseOutputParser, BaseGenerationOutputParser]] = None, @@ -64,7 +64,6 @@ def create_openai_fn_runnable( from langchain.chains.structured_output import create_openai_fn_runnable from langchain_openai import ChatOpenAI - from langchain_core.prompts import ChatPromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field @@ -85,15 +84,8 @@ class RecordDog(BaseModel): llm = ChatOpenAI(model="gpt-4", temperature=0) - prompt = ChatPromptTemplate.from_messages( - [ - ("system", "You are a world class algorithm for recording entities."), - ("human", "Make calls to the relevant function to record the entities in the following input: {input}"), - ("human", "Tip: Make sure to answer in the correct format"), - ] - ) - chain = create_openai_fn_runnable([RecordPerson, RecordDog], llm, prompt) - chain.invoke({"input": "Harry was a chubby brown beagle who loved chicken"}) + structured_llm = create_openai_fn_runnable([RecordPerson, RecordDog], llm) + structured_llm.invoke("Harry was a chubby brown beagle who loved chicken") # -> RecordDog(name="Harry", color="brown", fav_food="chicken") """ # noqa: E501 if not functions: @@ -103,14 +95,17 @@ class RecordDog(BaseModel): if len(openai_functions) == 1 and enforce_single_function_usage: llm_kwargs["function_call"] = {"name": openai_functions[0]["name"]} output_parser = output_parser or get_openai_output_parser(functions) - return prompt | llm.bind(**llm_kwargs) | output_parser + if prompt: + return prompt | llm.bind(**llm_kwargs) | output_parser + else: + return llm.bind(**llm_kwargs) | output_parser # TODO: implement mode='openai-tools'.
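Because `prompt` is now optional in both helpers, the same structured runnable can be invoked bare or composed with a prompt through LCEL. A compact sketch under those assumptions (the model name is illustrative only):

```python
# Hedged sketch of the optional-prompt behaviour described above.
from langchain.chains import create_structured_output_runnable
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.pydantic_v1 import BaseModel, Field
from langchain_openai import ChatOpenAI


class Dog(BaseModel):
    """Identifying information about a dog."""

    name: str = Field(..., description="The dog's name")
    color: str = Field(..., description="The dog's color")


llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0)
structured_llm = create_structured_output_runnable(Dog, llm, mode="openai-functions")

# Bare: the text goes straight to the model, no prompt argument needed.
structured_llm.invoke("Harry was a chubby brown beagle")

# Composed: prepend a prompt with the pipe operator instead of passing `prompt=`.
prompt = ChatPromptTemplate.from_messages(
    [("system", "Extract information about any dogs in the input."), ("human", "{input}")]
)
(prompt | structured_llm).invoke({"input": "Harry was a chubby brown beagle"})
```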
def create_structured_output_runnable( output_schema: Union[Dict[str, Any], Type[BaseModel]], llm: Runnable, - prompt: BasePromptTemplate, + prompt: Optional[BasePromptTemplate] = None, *, output_parser: Optional[Union[BaseOutputParser, BaseGenerationOutputParser]] = None, mode: Literal["openai-functions", "openai-json"] = "openai-functions", @@ -152,7 +147,28 @@ def create_structured_output_runnable( from typing import Optional - from langchain.chains.structured_output import create_structured_output_runnable + from langchain.chains import create_structured_output_runnable + from langchain_openai import ChatOpenAI + from langchain_core.pydantic_v1 import BaseModel, Field + + class Dog(BaseModel): + '''Identifying information about a dog.''' + + name: str = Field(..., description="The dog's name") + color: str = Field(..., description="The dog's color") + fav_food: Optional[str] = Field(None, description="The dog's favorite food") + + llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) + structured_llm = create_structured_output_runnable(Dog, llm, mode="openai-functions") + structured_llm.invoke("Harry was a chubby brown beagle who loved chicken") + # -> Dog(name="Harry", color="brown", fav_food="chicken") + + OpenAI functions with prompt example: + .. code-block:: python + + from typing import Optional + + from langchain.chains import create_structured_output_runnable from langchain_openai import ChatOpenAI from langchain_core.prompts import ChatPromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field @@ -165,23 +181,20 @@ class Dog(BaseModel): fav_food: Optional[str] = Field(None, description="The dog's favorite food") llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) + structured_llm = create_structured_output_runnable(Dog, llm, mode="openai-functions") + system = '''Extract information about any dogs mentioned in the user input.''' prompt = ChatPromptTemplate.from_messages( - [ - ("system", "You are a world class algorithm for extracting information in structured formats."), - ("human", "Use the given format to extract information from the following input: {input}"), - ("human", "Tip: Make sure to answer in the correct format"), - ] + [("system", system), ("human", "{input}"),] ) - chain = create_structured_output_runnable(Dog, llm, prompt, mode="openai-functions") + chain = prompt | structured_llm chain.invoke({"input": "Harry was a chubby brown beagle who loved chicken"}) # -> Dog(name="Harry", color="brown", fav_food="chicken") - OpenAI json response format example: .. code-block:: python from typing import Optional - from langchain.chains.structured_output import create_structured_output_runnable + from langchain.chains import create_structured_output_runnable from langchain_openai import ChatOpenAI from langchain_core.prompts import ChatPromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field @@ -194,32 +207,30 @@ class Dog(BaseModel): fav_food: Optional[str] = Field(None, description="The dog's favorite food") llm = ChatOpenAI(model="gpt-3.5-turbo-0125", temperature=0) + structured_llm = create_structured_output_runnable(Dog, llm, mode="openai-json") system = '''You are a world class assistant for extracting information in structured JSON formats. 
\ Extract a valid JSON blob from the user input that matches the following JSON Schema: {output_schema}''' prompt = ChatPromptTemplate.from_messages( - [ - ("system", system), - ("human", "{input}"), - ] + [("system", system), ("human", "{input}"),] ) - chain = create_structured_output_runnable(Dog, llm, prompt, mode="openai-json") + chain = prompt | structured_llm chain.invoke({"input": "Harry was a chubby brown beagle who loved chicken"}) """ # noqa: E501 if mode == "openai-functions": return _create_openai_functions_structured_output_runnable( output_schema, llm, - prompt, + prompt=prompt, output_parser=output_parser, enforce_single_function_usage=enforce_single_function_usage, **kwargs, ) elif mode == "openai-json": return _create_openai_json_runnable( - output_schema, llm, prompt, output_parser=output_parser, **kwargs + output_schema, llm, prompt=prompt, output_parser=output_parser, **kwargs ) else: raise ValueError( @@ -263,7 +274,7 @@ def get_openai_output_parser( def _create_openai_json_runnable( output_schema: Union[Dict[str, Any], Type[BaseModel]], llm: Runnable, - prompt: BasePromptTemplate, + prompt: Optional[BasePromptTemplate] = None, *, output_parser: Optional[Union[BaseOutputParser, BaseGenerationOutputParser]] = None, ) -> Runnable: @@ -277,17 +288,20 @@ def _create_openai_json_runnable( output_parser = output_parser or JsonOutputParser() schema_as_dict = output_schema - if "output_schema" in prompt.input_variables: - prompt = prompt.partial(output_schema=json.dumps(schema_as_dict, indent=2)) - llm = llm.bind(response_format={"type": "json_object"}) - return prompt | llm | output_parser + if prompt: + if "output_schema" in prompt.input_variables: + prompt = prompt.partial(output_schema=json.dumps(schema_as_dict, indent=2)) + + return prompt | llm | output_parser + else: + return llm | output_parser def _create_openai_functions_structured_output_runnable( output_schema: Union[Dict[str, Any], Type[BaseModel]], llm: Runnable, - prompt: BasePromptTemplate, + prompt: Optional[BasePromptTemplate] = None, *, output_parser: Optional[Union[BaseOutputParser, BaseGenerationOutputParser]] = None, **kwargs: Any, @@ -315,7 +329,7 @@ class _OutputFormatter(BaseModel): return create_openai_fn_runnable( [function], llm, - prompt, + prompt=prompt, output_parser=output_parser, **kwargs, ) diff --git a/libs/langchain/langchain/chains/summarize/__init__.py b/libs/langchain/langchain/chains/summarize/__init__.py index 9bc8b8118bd6b..3f6068a0f2689 100644 --- a/libs/langchain/langchain/chains/summarize/__init__.py +++ b/libs/langchain/langchain/chains/summarize/__init__.py @@ -52,6 +52,8 @@ def _load_map_reduce_chain( verbose: Optional[bool] = None, token_max: int = 3000, callbacks: Callbacks = None, + *, + collapse_max_retries: Optional[int] = None, **kwargs: Any, ) -> MapReduceDocumentsChain: map_chain = LLMChain( @@ -92,6 +94,7 @@ def _load_map_reduce_chain( token_max=token_max, verbose=verbose, callbacks=callbacks, + collapse_max_retries=collapse_max_retries, ) return MapReduceDocumentsChain( llm_chain=map_chain, diff --git a/libs/langchain/langchain/embeddings/cache.py b/libs/langchain/langchain/embeddings/cache.py index 382827991ff5d..51d3969e53967 100644 --- a/libs/langchain/langchain/embeddings/cache.py +++ b/libs/langchain/langchain/embeddings/cache.py @@ -128,6 +128,41 @@ def embed_documents(self, texts: List[str]) -> List[List[float]]: List[List[float]], vectors ) # Nones should have been resolved by now + async def aembed_documents(self, texts: List[str]) -> List[List[float]]: + 
"""Embed a list of texts. + + The method first checks the cache for the embeddings. + If the embeddings are not found, the method uses the underlying embedder + to embed the documents and stores the results in the cache. + + Args: + texts: A list of texts to embed. + + Returns: + A list of embeddings for the given texts. + """ + vectors: List[ + Union[List[float], None] + ] = await self.document_embedding_store.amget(texts) + missing_indices: List[int] = [ + i for i, vector in enumerate(vectors) if vector is None + ] + missing_texts = [texts[i] for i in missing_indices] + + if missing_texts: + missing_vectors = await self.underlying_embeddings.aembed_documents( + missing_texts + ) + await self.document_embedding_store.amset( + list(zip(missing_texts, missing_vectors)) + ) + for index, updated_vector in zip(missing_indices, missing_vectors): + vectors[index] = updated_vector + + return cast( + List[List[float]], vectors + ) # Nones should have been resolved by now + def embed_query(self, text: str) -> List[float]: """Embed query text. @@ -148,6 +183,26 @@ def embed_query(self, text: str) -> List[float]: """ return self.underlying_embeddings.embed_query(text) + async def aembed_query(self, text: str) -> List[float]: + """Embed query text. + + This method does not support caching at the moment. + + Support for caching queries is easily to implement, but might make + sense to hold off to see the most common patterns. + + If the cache has an eviction policy, we may need to be a bit more careful + about sharing the cache between documents and queries. Generally, + one is OK evicting query caches, but document caches should be kept. + + Args: + text: The text to embed. + + Returns: + The embedding for the given text. + """ + return await self.underlying_embeddings.aembed_query(text) + @classmethod def from_bytes_store( cls, diff --git a/libs/langchain/langchain/output_parsers/pydantic.py b/libs/langchain/langchain/output_parsers/pydantic.py index 1248560c0ca2f..9e415650425c3 100644 --- a/libs/langchain/langchain/output_parsers/pydantic.py +++ b/libs/langchain/langchain/output_parsers/pydantic.py @@ -29,7 +29,8 @@ def parse_result(self, result: List[Generation], *, partial: bool = False) -> An raise OutputParserException(msg, llm_output=json_object) def get_format_instructions(self) -> str: - schema = self.pydantic_object.schema() + # Copy schema to avoid altering original Pydantic schema. + schema = {k: v for k, v in self.pydantic_object.schema().items()} # Remove extraneous fields. reduced_schema = schema diff --git a/libs/langchain/langchain/output_parsers/yaml.py b/libs/langchain/langchain/output_parsers/yaml.py index 528fc93f50762..21bcf359a2c95 100644 --- a/libs/langchain/langchain/output_parsers/yaml.py +++ b/libs/langchain/langchain/output_parsers/yaml.py @@ -43,7 +43,8 @@ def parse(self, text: str) -> T: raise OutputParserException(msg, llm_output=text) from e def get_format_instructions(self) -> str: - schema = self.pydantic_object.schema() + # Copy schema to avoid altering original Pydantic schema. + schema = {k: v for k, v in self.pydantic_object.schema().items()} # Remove extraneous fields. 
reduced_schema = schema diff --git a/libs/langchain/langchain/retrievers/document_compressors/__init__.py b/libs/langchain/langchain/retrievers/document_compressors/__init__.py index 410ad540d19e8..a4c1456cf24d0 100644 --- a/libs/langchain/langchain/retrievers/document_compressors/__init__.py +++ b/libs/langchain/langchain/retrievers/document_compressors/__init__.py @@ -9,6 +9,7 @@ from langchain.retrievers.document_compressors.embeddings_filter import ( EmbeddingsFilter, ) +from langchain.retrievers.document_compressors.flashrank_rerank import FlashrankRerank __all__ = [ "DocumentCompressorPipeline", @@ -16,4 +17,5 @@ "LLMChainExtractor", "LLMChainFilter", "CohereRerank", + "FlashrankRerank", ] diff --git a/libs/langchain/langchain/retrievers/document_compressors/cohere_rerank.py b/libs/langchain/langchain/retrievers/document_compressors/cohere_rerank.py index b36ea305c7893..3374f7906b907 100644 --- a/libs/langchain/langchain/retrievers/document_compressors/cohere_rerank.py +++ b/libs/langchain/langchain/retrievers/document_compressors/cohere_rerank.py @@ -14,14 +14,15 @@ class CohereRerank(BaseDocumentCompressor): """Document compressor that uses `Cohere Rerank API`.""" - client: Any + client: Any = None """Cohere client to use for compressing documents.""" top_n: Optional[int] = 3 """Number of documents to return.""" model: str = "rerank-english-v2.0" """Model to use for reranking.""" - cohere_api_key: Optional[str] = None + """Cohere API key. Must be specified directly or via environment variable + COHERE_API_KEY.""" user_agent: str = "langchain" """Identifier for the application making the request.""" @@ -34,18 +35,19 @@ class Config: @root_validator(pre=True) def validate_environment(cls, values: Dict) -> Dict: """Validate that api key and python package exists in environment.""" - try: - import cohere - except ImportError: - raise ImportError( - "Could not import cohere python package. " - "Please install it with `pip install cohere`." + if not values.get("client"): + try: + import cohere + except ImportError: + raise ImportError( + "Could not import cohere python package. " + "Please install it with `pip install cohere`." 
+ ) + cohere_api_key = get_from_dict_or_env( + values, "cohere_api_key", "COHERE_API_KEY" ) - cohere_api_key = get_from_dict_or_env( - values, "cohere_api_key", "COHERE_API_KEY" - ) - client_name = values.get("user_agent", "langchain") - values["client"] = cohere.Client(cohere_api_key, client_name=client_name) + client_name = values.get("user_agent", "langchain") + values["client"] = cohere.Client(cohere_api_key, client_name=client_name) return values def rerank( diff --git a/libs/langchain/langchain/retrievers/document_compressors/flashrank_rerank.py b/libs/langchain/langchain/retrievers/document_compressors/flashrank_rerank.py new file mode 100644 index 0000000000000..273aff3d44ca3 --- /dev/null +++ b/libs/langchain/langchain/retrievers/document_compressors/flashrank_rerank.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Dict, Optional, Sequence + +from langchain_core.documents import Document +from langchain_core.pydantic_v1 import Extra, root_validator + +from langchain.callbacks.manager import Callbacks +from langchain.retrievers.document_compressors.base import BaseDocumentCompressor + +if TYPE_CHECKING: + from flashrank import Ranker, RerankRequest +else: + # Avoid pydantic annotation issues when actually instantiating + # while keeping this import optional + try: + from flashrank import Ranker, RerankRequest + except ImportError: + pass + +DEFAULT_MODEL_NAME = "ms-marco-MultiBERT-L-12" + + +class FlashrankRerank(BaseDocumentCompressor): + """Document compressor using Flashrank interface.""" + + client: Ranker + """Flashrank client to use for compressing documents""" + top_n: int = 3 + """Number of documents to return.""" + model: Optional[str] = None + """Model to use for reranking.""" + + class Config: + """Configuration for this pydantic object.""" + + extra = Extra.forbid + arbitrary_types_allowed = True + + @root_validator(pre=True) + def validate_environment(cls, values: Dict) -> Dict: + """Validate that api key and python package exists in environment.""" + try: + from flashrank import Ranker + except ImportError: + raise ImportError( + "Could not import flashrank python package. " + "Please install it with `pip install flashrank`." 
+ ) + + values["model"] = values.get("model", DEFAULT_MODEL_NAME) + values["client"] = Ranker(model_name=values["model"]) + return values + + def compress_documents( + self, + documents: Sequence[Document], + query: str, + callbacks: Optional[Callbacks] = None, + ) -> Sequence[Document]: + passages = [ + {"id": i, "text": doc.page_content} for i, doc in enumerate(documents) + ] + + rerank_request = RerankRequest(query=query, passages=passages) + rerank_response = self.client.rerank(rerank_request)[: self.top_n] + final_results = [] + for r in rerank_response: + doc = Document( + page_content=r["text"], + metadata={"id": r["id"], "relevance_score": r["score"]}, + ) + final_results.append(doc) + return final_results diff --git a/libs/langchain/langchain/retrievers/multi_query.py b/libs/langchain/langchain/retrievers/multi_query.py index 7d60b2215140b..ca7e731c51a67 100644 --- a/libs/langchain/langchain/retrievers/multi_query.py +++ b/libs/langchain/langchain/retrievers/multi_query.py @@ -1,39 +1,28 @@ import asyncio import logging -from typing import List, Sequence +from typing import List, Optional, Sequence from langchain_core.callbacks import ( AsyncCallbackManagerForRetrieverRun, CallbackManagerForRetrieverRun, ) from langchain_core.documents import Document -from langchain_core.language_models import BaseLLM +from langchain_core.language_models import BaseLanguageModel +from langchain_core.output_parsers import BaseOutputParser from langchain_core.prompts.prompt import PromptTemplate -from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.retrievers import BaseRetriever from langchain.chains.llm import LLMChain -from langchain.output_parsers.pydantic import PydanticOutputParser logger = logging.getLogger(__name__) -class LineList(BaseModel): - """List of lines.""" - - lines: List[str] = Field(description="Lines of text") - """List of lines.""" - - -class LineListOutputParser(PydanticOutputParser): +class LineListOutputParser(BaseOutputParser[List[str]]): """Output parser for a list of lines.""" - def __init__(self) -> None: - super().__init__(pydantic_object=LineList) - - def parse(self, text: str) -> LineList: + def parse(self, text: str) -> List[str]: lines = text.strip().split("\n") - return LineList(lines=lines) + return lines # Default prompt @@ -63,6 +52,7 @@ class MultiQueryRetriever(BaseRetriever): llm_chain: LLMChain verbose: bool = True parser_key: str = "lines" + """DEPRECATED. parser_key is no longer used and should not be specified.""" include_original: bool = False """Whether to include the original query in the list of generated queries.""" @@ -70,9 +60,9 @@ class MultiQueryRetriever(BaseRetriever): def from_llm( cls, retriever: BaseRetriever, - llm: BaseLLM, + llm: BaseLanguageModel, prompt: PromptTemplate = DEFAULT_QUERY_PROMPT, - parser_key: str = "lines", + parser_key: Optional[str] = None, include_original: bool = False, ) -> "MultiQueryRetriever": """Initialize from llm using default template. 
@@ -91,7 +81,6 @@ def from_llm( return cls( retriever=retriever, llm_chain=llm_chain, - parser_key=parser_key, include_original=include_original, ) @@ -129,7 +118,7 @@ async def agenerate_queries( response = await self.llm_chain.acall( inputs={"question": question}, callbacks=run_manager.get_child() ) - lines = getattr(response["text"], self.parser_key, []) + lines = response["text"] if self.verbose: logger.info(f"Generated queries: {lines}") return lines @@ -189,7 +178,7 @@ def generate_queries( response = self.llm_chain( {"question": question}, callbacks=run_manager.get_child() ) - lines = getattr(response["text"], self.parser_key, []) + lines = response["text"] if self.verbose: logger.info(f"Generated queries: {lines}") return lines diff --git a/libs/langchain/langchain/retrievers/self_query/elasticsearch.py b/libs/langchain/langchain/retrievers/self_query/elasticsearch.py index c908faa643f3f..7c2f7671a5c9b 100644 --- a/libs/langchain/langchain/retrievers/self_query/elasticsearch.py +++ b/libs/langchain/langchain/retrievers/self_query/elasticsearch.py @@ -61,11 +61,10 @@ def visit_comparison(self, comparison: Comparison) -> Dict: ] if is_range_comparator: - return { - "range": { - field: {self._format_func(comparison.comparator): comparison.value} - } - } + value = comparison.value + if isinstance(comparison.value, dict) and "date" in comparison.value: + value = comparison.value["date"] + return {"range": {field: {self._format_func(comparison.comparator): value}}} if comparison.comparator == Comparator.CONTAIN: return { @@ -85,6 +84,10 @@ def visit_comparison(self, comparison: Comparison) -> Dict: # we want to use the keyword field field = f"{field}.keyword" if isinstance(comparison.value, str) else field + if isinstance(comparison.value, dict): + if "date" in comparison.value: + comparison.value = comparison.value["date"] + return {self._format_func(comparison.comparator): {field: comparison.value}} def visit_structured_query( diff --git a/libs/langchain/langchain/retrievers/web_research.py b/libs/langchain/langchain/retrievers/web_research.py index 149ef247af730..b992490f61723 100644 --- a/libs/langchain/langchain/retrievers/web_research.py +++ b/libs/langchain/langchain/retrievers/web_research.py @@ -12,6 +12,7 @@ ) from langchain_core.documents import Document from langchain_core.language_models import BaseLLM +from langchain_core.output_parsers import BaseOutputParser from langchain_core.prompts import BasePromptTemplate, PromptTemplate from langchain_core.pydantic_v1 import BaseModel, Field from langchain_core.retrievers import BaseRetriever @@ -19,7 +20,6 @@ from langchain.chains import LLMChain from langchain.chains.prompt_selector import ConditionalPromptSelector -from langchain.output_parsers.pydantic import PydanticOutputParser from langchain.text_splitter import RecursiveCharacterTextSplitter, TextSplitter logger = logging.getLogger(__name__) @@ -50,21 +50,12 @@ class SearchQueries(BaseModel): ) -class LineList(BaseModel): - """List of questions.""" - - lines: List[str] = Field(description="Questions") - - -class QuestionListOutputParser(PydanticOutputParser): +class QuestionListOutputParser(BaseOutputParser[List[str]]): """Output parser for a list of numbered questions.""" - def __init__(self) -> None: - super().__init__(pydantic_object=LineList) - - def parse(self, text: str) -> LineList: + def parse(self, text: str) -> List[str]: lines = re.findall(r"\d+\..*?(?:\n|$)", text) - return LineList(lines=lines) + return lines class WebResearchRetriever(BaseRetriever): @@ -176,7 
+167,7 @@ def _get_relevant_documents( logger.info("Generating questions for Google Search ...") result = self.llm_chain({"question": query}) logger.info(f"Questions for Google Search (raw): {result}") - questions = getattr(result["text"], "lines", []) + questions = result["text"] logger.info(f"Questions for Google Search: {questions}") # Get urls diff --git a/libs/langchain/langchain/text_splitter.py b/libs/langchain/langchain/text_splitter.py index a2c048f686ba6..1f382a53d0ef9 100644 --- a/libs/langchain/langchain/text_splitter.py +++ b/libs/langchain/langchain/text_splitter.py @@ -843,6 +843,9 @@ class Language(str, Enum): SOL = "sol" CSHARP = "csharp" COBOL = "cobol" + C = "c" + LUA = "lua" + PERL = "perl" class RecursiveCharacterTextSplitter(TextSplitter): diff --git a/libs/langchain/poetry.lock b/libs/langchain/poetry.lock index 64a0beba1dc59..a8e98f8c65b43 100644 --- a/libs/langchain/poetry.lock +++ b/libs/langchain/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "aiodns" @@ -3049,6 +3049,7 @@ files = [ {file = "jq-1.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:227b178b22a7f91ae88525810441791b1ca1fc71c86f03190911793be15cec3d"}, {file = "jq-1.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:780eb6383fbae12afa819ef676fc93e1548ae4b076c004a393af26a04b460742"}, {file = "jq-1.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:08ded6467f4ef89fec35b2bf310f210f8cd13fbd9d80e521500889edf8d22441"}, + {file = "jq-1.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:49e44ed677713f4115bd5bf2dbae23baa4cd503be350e12a1c1f506b0687848f"}, {file = "jq-1.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:984f33862af285ad3e41e23179ac4795f1701822473e1a26bf87ff023e5a89ea"}, {file = "jq-1.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f42264fafc6166efb5611b5d4cb01058887d050a6c19334f6a3f8a13bb369df5"}, {file = "jq-1.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a67154f150aaf76cc1294032ed588436eb002097dd4fd1e283824bf753a05080"}, @@ -3132,7 +3133,6 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, ] [[package]] @@ -3446,7 +3446,7 @@ files = [ [[package]] name = "langchain-community" -version = "0.0.19" +version = "0.0.20" description = "Community contributed LangChain integrations." 
optional = false python-versions = ">=3.8.1,<4.0" @@ -3457,7 +3457,7 @@ develop = true aiohttp = "^3.8.3" dataclasses-json = ">= 0.5.7, < 0.7" langchain-core = ">=0.1.21,<0.2" -langsmith = ">=0.0.83,<0.1" +langsmith = "^0.1.0" numpy = "^1" PyYAML = ">=5.3" requests = "^2" @@ -3466,7 +3466,7 @@ tenacity = "^8.1.0" [package.extras] cli = ["typer (>=0.9.0,<0.10.0)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx 
(>=0.24.1,<0.25.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] [package.source] type = "directory" @@ -3474,7 +3474,7 @@ url = "../community" [[package]] name = "langchain-core" -version = "0.1.22" +version = "0.1.23" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -3484,7 +3484,7 @@ develop = true [package.dependencies] anyio = ">=3,<5" jsonpatch = "^1.33" -langsmith = "^0.0.87" +langsmith = "^0.1.0" packaging = "^23.2" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -3517,13 +3517,13 @@ tiktoken = ">=0.5.2,<0.6.0" [[package]] name = "langsmith" -version = "0.0.87" +version = "0.1.1" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, - {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, + {file = "langsmith-0.1.1-py3-none-any.whl", hash = "sha256:10ff2b977a41e3f6351d1a4239d9bd57af0547aa909e839d2791e16cc197a6f9"}, + {file = "langsmith-0.1.1.tar.gz", hash = "sha256:09df0c2ca9085105f97a4e4f281b083e312c99d162f3fe2b2d5eefd5c3692e60"}, ] [package.dependencies] @@ -3743,16 +3743,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -5279,8 +5269,6 @@ files = [ {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, - {file = 
"psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, - {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, @@ -5323,7 +5311,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -5332,8 +5319,6 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -5806,7 +5791,6 @@ files = [ {file = "pymongo-4.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6422b6763b016f2ef2beedded0e546d6aa6ba87910f9244d86e0ac7690f75c96"}, {file = "pymongo-4.5.0-cp312-cp312-win32.whl", hash = "sha256:77cfff95c1fafd09e940b3fdcb7b65f11442662fad611d0e69b4dd5d17a81c60"}, {file = "pymongo-4.5.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:e57d859b972c75ee44ea2ef4758f12821243e99de814030f69a3decb2aa86807"}, - {file = "pymongo-4.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8443f3a8ab2d929efa761c6ebce39a6c1dca1c9ac186ebf11b62c8fe1aef53f4"}, {file = "pymongo-4.5.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2b0176f9233a5927084c79ff80b51bd70bfd57e4f3d564f50f80238e797f0c8a"}, {file = "pymongo-4.5.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:89b3f2da57a27913d15d2a07d58482f33d0a5b28abd20b8e643ab4d625e36257"}, {file = "pymongo-4.5.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:5caee7bd08c3d36ec54617832b44985bd70c4cbd77c5b313de6f7fce0bb34f93"}, @@ -6323,7 +6307,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -6331,16 +6314,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = 
"sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -6357,7 +6332,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -6365,7 +6339,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -9150,4 +9123,4 @@ text-helpers = ["chardet"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "5ac4a6a281e7816edff5ab0f851dd0a3a5fe50fc33513b789187aaa31a0ace1b" +content-hash = "e3419e64eee15fa3e93c00e2b76d4a29d9b08c4299291ec3fc65802b2aede5c0" diff --git a/libs/langchain/pyproject.toml b/libs/langchain/pyproject.toml index 6e3722d5eb174..89387d7391033 100644 --- a/libs/langchain/pyproject.toml +++ b/libs/langchain/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain" -version = "0.1.6" +version = "0.1.7" description = "Building applications with LLMs through composability" authors = [] license = "MIT" @@ -13,8 +13,8 @@ langchain-server = "langchain.server:main" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" 
langchain-core = ">=0.1.22,<0.2" -langchain-community = ">=0.0.18,<0.1" -langsmith = ">=0.0.83,<0.1" +langchain-community = ">=0.0.20,<0.1" +langsmith = "^0.1.0" pydantic = ">=1,<3" SQLAlchemy = ">=1.4,<3" requests = "^2" diff --git a/libs/langchain/tests/integration_tests/cache/test_astradb.py b/libs/langchain/tests/integration_tests/cache/test_astradb.py index 37d538f8004c2..0c973e60fd1f2 100644 --- a/libs/langchain/tests/integration_tests/cache/test_astradb.py +++ b/libs/langchain/tests/integration_tests/cache/test_astradb.py @@ -12,9 +12,12 @@ """ import os -from typing import Iterator +from typing import AsyncIterator, Iterator import pytest +from langchain_community.utilities.astradb import SetupMode +from langchain_core.caches import BaseCache +from langchain_core.language_models import LLM from langchain_core.outputs import Generation, LLMResult from langchain.cache import AstraDBCache, AstraDBSemanticCache @@ -41,7 +44,22 @@ def astradb_cache() -> Iterator[AstraDBCache]: namespace=os.environ.get("ASTRA_DB_KEYSPACE"), ) yield cache - cache.astra_db.delete_collection("lc_integration_test_cache") + cache.collection.astra_db.delete_collection("lc_integration_test_cache") + + +@pytest.fixture +async def async_astradb_cache() -> AsyncIterator[AstraDBCache]: + cache = AstraDBCache( + collection_name="lc_integration_test_cache_async", + token=os.environ["ASTRA_DB_APPLICATION_TOKEN"], + api_endpoint=os.environ["ASTRA_DB_API_ENDPOINT"], + namespace=os.environ.get("ASTRA_DB_KEYSPACE"), + setup_mode=SetupMode.ASYNC, + ) + yield cache + await cache.async_collection.astra_db.delete_collection( + "lc_integration_test_cache_async" + ) @pytest.fixture(scope="module") @@ -55,46 +73,87 @@ def astradb_semantic_cache() -> Iterator[AstraDBSemanticCache]: embedding=fake_embe, ) yield sem_cache - sem_cache.astra_db.delete_collection("lc_integration_test_cache") + sem_cache.collection.astra_db.delete_collection("lc_integration_test_sem_cache") + + +@pytest.fixture +async def async_astradb_semantic_cache() -> AsyncIterator[AstraDBSemanticCache]: + fake_embe = FakeEmbeddings() + sem_cache = AstraDBSemanticCache( + collection_name="lc_integration_test_sem_cache_async", + token=os.environ["ASTRA_DB_APPLICATION_TOKEN"], + api_endpoint=os.environ["ASTRA_DB_API_ENDPOINT"], + namespace=os.environ.get("ASTRA_DB_KEYSPACE"), + embedding=fake_embe, + setup_mode=SetupMode.ASYNC, + ) + yield sem_cache + sem_cache.collection.astra_db.delete_collection( + "lc_integration_test_sem_cache_async" + ) @pytest.mark.requires("astrapy") @pytest.mark.skipif(not _has_env_vars(), reason="Missing Astra DB env. 
vars") class TestAstraDBCaches: def test_astradb_cache(self, astradb_cache: AstraDBCache) -> None: - set_llm_cache(astradb_cache) + self.do_cache_test(FakeLLM(), astradb_cache, "foo") + + async def test_astradb_cache_async(self, async_astradb_cache: AstraDBCache) -> None: + await self.ado_cache_test(FakeLLM(), async_astradb_cache, "foo") + + def test_astradb_semantic_cache( + self, astradb_semantic_cache: AstraDBSemanticCache + ) -> None: llm = FakeLLM() + self.do_cache_test(llm, astradb_semantic_cache, "bar") + output = llm.generate(["bar"]) # 'fizz' is erased away now + assert output != LLMResult( + generations=[[Generation(text="fizz")]], + llm_output={}, + ) + astradb_semantic_cache.clear() + + async def test_astradb_semantic_cache_async( + self, async_astradb_semantic_cache: AstraDBSemanticCache + ) -> None: + llm = FakeLLM() + await self.ado_cache_test(llm, async_astradb_semantic_cache, "bar") + output = await llm.agenerate(["bar"]) # 'fizz' is erased away now + assert output != LLMResult( + generations=[[Generation(text="fizz")]], + llm_output={}, + ) + await async_astradb_semantic_cache.aclear() + + @staticmethod + def do_cache_test(llm: LLM, cache: BaseCache, prompt: str) -> None: + set_llm_cache(cache) params = llm.dict() params["stop"] = None llm_string = str(sorted([(k, v) for k, v in params.items()])) get_llm_cache().update("foo", llm_string, [Generation(text="fizz")]) - output = llm.generate(["foo"]) - print(output) # noqa: T201 + output = llm.generate([prompt]) expected_output = LLMResult( generations=[[Generation(text="fizz")]], llm_output={}, ) - print(expected_output) # noqa: T201 assert output == expected_output - astradb_cache.clear() + # clear the cache + cache.clear() - def test_cassandra_semantic_cache( - self, astradb_semantic_cache: AstraDBSemanticCache - ) -> None: - set_llm_cache(astradb_semantic_cache) - llm = FakeLLM() + @staticmethod + async def ado_cache_test(llm: LLM, cache: BaseCache, prompt: str) -> None: + set_llm_cache(cache) params = llm.dict() params["stop"] = None llm_string = str(sorted([(k, v) for k, v in params.items()])) - get_llm_cache().update("foo", llm_string, [Generation(text="fizz")]) - output = llm.generate(["bar"]) # same embedding as 'foo' + await get_llm_cache().aupdate("foo", llm_string, [Generation(text="fizz")]) + output = await llm.agenerate([prompt]) expected_output = LLMResult( generations=[[Generation(text="fizz")]], llm_output={}, ) assert output == expected_output # clear the cache - astradb_semantic_cache.clear() - output = llm.generate(["bar"]) # 'fizz' is erased away now - assert output != expected_output - astradb_semantic_cache.clear() + await cache.aclear() diff --git a/libs/langchain/tests/integration_tests/memory/test_astradb.py b/libs/langchain/tests/integration_tests/memory/test_astradb.py index a8ed9e7574ba7..4caf39985ec8e 100644 --- a/libs/langchain/tests/integration_tests/memory/test_astradb.py +++ b/libs/langchain/tests/integration_tests/memory/test_astradb.py @@ -1,10 +1,11 @@ import os -from typing import Iterable +from typing import AsyncIterable, Iterable import pytest from langchain_community.chat_message_histories.astradb import ( AstraDBChatMessageHistory, ) +from langchain_community.utilities.astradb import SetupMode from langchain_core.messages import AIMessage, HumanMessage from langchain.memory import ConversationBufferMemory @@ -29,7 +30,7 @@ def history1() -> Iterable[AstraDBChatMessageHistory]: namespace=os.environ.get("ASTRA_DB_KEYSPACE"), ) yield history1 - 
history1.astra_db.delete_collection("langchain_cmh_test") + history1.collection.astra_db.delete_collection("langchain_cmh_test") @pytest.fixture(scope="function") @@ -42,7 +43,35 @@ def history2() -> Iterable[AstraDBChatMessageHistory]: namespace=os.environ.get("ASTRA_DB_KEYSPACE"), ) yield history2 - history2.astra_db.delete_collection("langchain_cmh_test") + history2.collection.astra_db.delete_collection("langchain_cmh_test") + + +@pytest.fixture +async def async_history1() -> AsyncIterable[AstraDBChatMessageHistory]: + history1 = AstraDBChatMessageHistory( + session_id="async-session-test-1", + collection_name="langchain_cmh_test", + token=os.environ["ASTRA_DB_APPLICATION_TOKEN"], + api_endpoint=os.environ["ASTRA_DB_API_ENDPOINT"], + namespace=os.environ.get("ASTRA_DB_KEYSPACE"), + setup_mode=SetupMode.ASYNC, + ) + yield history1 + await history1.async_collection.astra_db.delete_collection("langchain_cmh_test") + + +@pytest.fixture(scope="function") +async def async_history2() -> AsyncIterable[AstraDBChatMessageHistory]: + history2 = AstraDBChatMessageHistory( + session_id="async-session-test-2", + collection_name="langchain_cmh_test", + token=os.environ["ASTRA_DB_APPLICATION_TOKEN"], + api_endpoint=os.environ["ASTRA_DB_API_ENDPOINT"], + namespace=os.environ.get("ASTRA_DB_KEYSPACE"), + setup_mode=SetupMode.ASYNC, + ) + yield history2 + await history2.async_collection.astra_db.delete_collection("langchain_cmh_test") @pytest.mark.requires("astrapy") @@ -58,8 +87,12 @@ def test_memory_with_message_store(history1: AstraDBChatMessageHistory) -> None: assert memory.chat_memory.messages == [] # add some messages - memory.chat_memory.add_ai_message("This is me, the AI") - memory.chat_memory.add_user_message("This is me, the human") + memory.chat_memory.add_messages( + [ + AIMessage(content="This is me, the AI"), + HumanMessage(content="This is me, the human"), + ] + ) messages = memory.chat_memory.messages expected = [ @@ -74,6 +107,41 @@ def test_memory_with_message_store(history1: AstraDBChatMessageHistory) -> None: assert memory.chat_memory.messages == [] +@pytest.mark.requires("astrapy") +@pytest.mark.skipif(not _has_env_vars(), reason="Missing Astra DB env. vars") +async def test_memory_with_message_store_async( + async_history1: AstraDBChatMessageHistory, +) -> None: + """Test the memory with a message store.""" + memory = ConversationBufferMemory( + memory_key="baz", + chat_memory=async_history1, + return_messages=True, + ) + + assert await memory.chat_memory.aget_messages() == [] + + # add some messages + await memory.chat_memory.aadd_messages( + [ + AIMessage(content="This is me, the AI"), + HumanMessage(content="This is me, the human"), + ] + ) + + messages = await memory.chat_memory.aget_messages() + expected = [ + AIMessage(content="This is me, the AI"), + HumanMessage(content="This is me, the human"), + ] + assert messages == expected + + # clear the store + await memory.chat_memory.aclear() + + assert await memory.chat_memory.aget_messages() == [] + + @pytest.mark.requires("astrapy") @pytest.mark.skipif(not _has_env_vars(), reason="Missing Astra DB env. 
vars") def test_memory_separate_session_ids( @@ -91,7 +159,7 @@ def test_memory_separate_session_ids( return_messages=True, ) - memory1.chat_memory.add_ai_message("Just saying.") + memory1.chat_memory.add_messages([AIMessage(content="Just saying.")]) assert memory2.chat_memory.messages == [] @@ -102,3 +170,33 @@ def test_memory_separate_session_ids( memory1.chat_memory.clear() assert memory1.chat_memory.messages == [] + + +@pytest.mark.requires("astrapy") +@pytest.mark.skipif(not _has_env_vars(), reason="Missing Astra DB env. vars") +async def test_memory_separate_session_ids_async( + async_history1: AstraDBChatMessageHistory, async_history2: AstraDBChatMessageHistory +) -> None: + """Test that separate session IDs do not share entries.""" + memory1 = ConversationBufferMemory( + memory_key="mk1", + chat_memory=async_history1, + return_messages=True, + ) + memory2 = ConversationBufferMemory( + memory_key="mk2", + chat_memory=async_history2, + return_messages=True, + ) + + await memory1.chat_memory.aadd_messages([AIMessage(content="Just saying.")]) + + assert await memory2.chat_memory.aget_messages() == [] + + await memory2.chat_memory.aclear() + + assert await memory1.chat_memory.aget_messages() != [] + + await memory1.chat_memory.aclear() + + assert await memory1.chat_memory.aget_messages() == [] diff --git a/libs/langchain/tests/unit_tests/chains/test_base.py b/libs/langchain/tests/unit_tests/chains/test_base.py index 2c410e2337ddb..c96f1d945b1c3 100644 --- a/libs/langchain/tests/unit_tests/chains/test_base.py +++ b/libs/langchain/tests/unit_tests/chains/test_base.py @@ -162,3 +162,35 @@ def test_run_with_callback() -> None: assert handler.starts == 1 assert handler.ends == 1 assert handler.errors == 0 + + +def test_run_with_callback_and_input_error() -> None: + """Test callback manager catches run validation input error.""" + handler = FakeCallbackHandler() + chain = FakeChain( + the_input_keys=["foo", "bar"], + callbacks=[handler], + ) + + with pytest.raises(ValueError): + chain({"bar": "foo"}) + + assert handler.starts == 1 + assert handler.ends == 0 + assert handler.errors == 1 + + +def test_run_with_callback_and_output_error() -> None: + """Test callback manager catches run validation output error.""" + handler = FakeCallbackHandler() + chain = FakeChain( + the_output_keys=["foo", "bar"], + callbacks=[handler], + ) + + with pytest.raises(ValueError): + chain("foo") + + assert handler.starts == 1 + assert handler.ends == 0 + assert handler.errors == 1 diff --git a/libs/langchain/tests/unit_tests/chains/test_imports.py b/libs/langchain/tests/unit_tests/chains/test_imports.py index 8317dd62ea983..4da79031fe31b 100644 --- a/libs/langchain/tests/unit_tests/chains/test_imports.py +++ b/libs/langchain/tests/unit_tests/chains/test_imports.py @@ -33,6 +33,7 @@ "NatBotChain", "NebulaGraphQAChain", "NeptuneOpenCypherQAChain", + "NeptuneSparqlQAChain", "OpenAIModerationChain", "OpenAPIEndpointChain", "QAGenerationChain", @@ -61,6 +62,7 @@ "create_history_aware_retriever", "create_retrieval_chain", "load_summarize_chain", + "create_structured_output_runnable", ] diff --git a/libs/langchain/tests/unit_tests/embeddings/test_caching.py b/libs/langchain/tests/unit_tests/embeddings/test_caching.py index 48c6adbab6600..8c24e73b95b3f 100644 --- a/libs/langchain/tests/unit_tests/embeddings/test_caching.py +++ b/libs/langchain/tests/unit_tests/embeddings/test_caching.py @@ -47,3 +47,23 @@ def test_embed_query(cache_embeddings: CacheBackedEmbeddings) -> None: vector = cache_embeddings.embed_query(text) 
expected_vector = [5.0, 6.0] assert vector == expected_vector + + +async def test_aembed_documents(cache_embeddings: CacheBackedEmbeddings) -> None: + texts = ["1", "22", "a", "333"] + vectors = await cache_embeddings.aembed_documents(texts) + expected_vectors: List[List[float]] = [[1, 2.0], [2.0, 3.0], [1.0, 2.0], [3.0, 4.0]] + assert vectors == expected_vectors + keys = [ + key async for key in cache_embeddings.document_embedding_store.ayield_keys() + ] + assert len(keys) == 4 + # UUID is expected to be the same for the same text + assert keys[0] == "test_namespace812b86c1-8ebf-5483-95c6-c95cf2b52d12" + + +async def test_aembed_query(cache_embeddings: CacheBackedEmbeddings) -> None: + text = "query_text" + vector = await cache_embeddings.aembed_query(text) + expected_vector = [5.0, 6.0] + assert vector == expected_vector diff --git a/libs/langchain/tests/unit_tests/retrievers/self_query/test_elasticsearch.py b/libs/langchain/tests/unit_tests/retrievers/self_query/test_elasticsearch.py index 25c4526c8010d..519f366f03146 100644 --- a/libs/langchain/tests/unit_tests/retrievers/self_query/test_elasticsearch.py +++ b/libs/langchain/tests/unit_tests/retrievers/self_query/test_elasticsearch.py @@ -218,3 +218,98 @@ def test_visit_structured_query_complex() -> None: ) actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query) assert expected == actual + + +def test_visit_structured_query_with_date_range() -> None: + query = "Who was the president of France in 1995?" + operation = Operation( + operator=Operator.AND, + arguments=[ + Comparison(comparator=Comparator.EQ, attribute="foo", value="20"), + Operation( + operator=Operator.AND, + arguments=[ + Comparison( + comparator=Comparator.GTE, + attribute="timestamp", + value={"date": "1995-01-01", "type": "date"}, + ), + Comparison( + comparator=Comparator.LT, + attribute="timestamp", + value={"date": "1996-01-01", "type": "date"}, + ), + ], + ), + ], + ) + structured_query = StructuredQuery(query=query, filter=operation, limit=None) + expected = ( + query, + { + "filter": [ + { + "bool": { + "must": [ + {"term": {"metadata.foo.keyword": "20"}}, + { + "bool": { + "must": [ + { + "range": { + "metadata.timestamp": { + "gte": "1995-01-01" + } + } + }, + { + "range": { + "metadata.timestamp": { + "lt": "1996-01-01" + } + } + }, + ] + } + }, + ] + } + } + ] + }, + ) + actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query) + assert expected == actual + + +def test_visit_structured_query_with_date() -> None: + query = "Who was the president of France on 1st of January 1995?" 
+ operation = Operation( + operator=Operator.AND, + arguments=[ + Comparison(comparator=Comparator.EQ, attribute="foo", value="20"), + Comparison( + comparator=Comparator.EQ, + attribute="timestamp", + value={"date": "1995-01-01", "type": "date"}, + ), + ], + ) + structured_query = StructuredQuery(query=query, filter=operation, limit=None) + expected = ( + query, + { + "filter": [ + { + "bool": { + "must": [ + {"term": {"metadata.foo.keyword": "20"}}, + {"term": {"metadata.timestamp": "1995-01-01"}}, + ] + } + } + ] + }, + ) + actual = DEFAULT_TRANSLATOR.visit_structured_query(structured_query) + assert expected == actual diff --git a/libs/langchain/tests/unit_tests/retrievers/test_ensemble.py b/libs/langchain/tests/unit_tests/retrievers/test_ensemble.py index a84ee1f26a734..8b9cbbed11561 100644 --- a/libs/langchain/tests/unit_tests/retrievers/test_ensemble.py +++ b/libs/langchain/tests/unit_tests/retrievers/test_ensemble.py @@ -1,6 +1,8 @@ import pytest from langchain_core.documents import Document +from langchain.embeddings import FakeEmbeddings +from langchain.retrievers import KNNRetriever, TFIDFRetriever from langchain.retrievers.bm25 import BM25Retriever from langchain.retrievers.ensemble import EnsembleRetriever @@ -40,3 +42,38 @@ def test_weighted_reciprocal_rank() -> None: result = ensemble_retriever.weighted_reciprocal_rank([[doc1, doc2], [doc2, doc1]]) assert result[0].page_content == "1" assert result[1].page_content == "2" + + +@pytest.mark.requires("rank_bm25", "sklearn") +def test_ensemble_retriever_get_relevant_docs_with_multiple_retrievers() -> None: + doc_list_a = [ + "I like apples", + "I like oranges", + "Apples and oranges are fruits", + ] + doc_list_b = [ + "I like melons", + "I like pineapples", + "Melons and pineapples are fruits", + ] + doc_list_c = [ + "I like avocados", + "I like strawberries", + "Avocados and strawberries are fruits", + ] + + dummy_retriever = BM25Retriever.from_texts(doc_list_a) + dummy_retriever.k = 1 + tfidf_retriever = TFIDFRetriever.from_texts(texts=doc_list_b) + tfidf_retriever.k = 1 + knn_retriever = KNNRetriever.from_texts( + texts=doc_list_c, embeddings=FakeEmbeddings(size=100) + ) + knn_retriever.k = 1 + + ensemble_retriever = EnsembleRetriever( + retrievers=[dummy_retriever, tfidf_retriever, knn_retriever], + weights=[0.6, 0.3, 0.1], + ) + docs = ensemble_retriever.get_relevant_documents("I like apples") + assert len(docs) == 3 diff --git a/libs/langchain/tests/unit_tests/retrievers/test_web_research.py b/libs/langchain/tests/unit_tests/retrievers/test_web_research.py index a052e59b72264..29878dd3b4b9d 100644 --- a/libs/langchain/tests/unit_tests/retrievers/test_web_research.py +++ b/libs/langchain/tests/unit_tests/retrievers/test_web_research.py @@ -33,4 +33,4 @@ def test_list_output_parser(text: str, expected: List[str]) -> None: parser = QuestionListOutputParser() result = parser.parse(text) - assert result.lines == expected + assert result == expected diff --git a/libs/partners/ai21/.gitignore b/libs/partners/ai21/.gitignore new file mode 100644 index 0000000000000..bee8a64b79a99 --- /dev/null +++ b/libs/partners/ai21/.gitignore @@ -0,0 +1 @@ +__pycache__ diff --git a/libs/partners/ai21/LICENSE b/libs/partners/ai21/LICENSE new file mode 100644 index 0000000000000..426b65090341f --- /dev/null +++ b/libs/partners/ai21/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 LangChain, Inc. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/libs/partners/ai21/Makefile b/libs/partners/ai21/Makefile new file mode 100644 index 0000000000000..1e9cd1ea594b2 --- /dev/null +++ b/libs/partners/ai21/Makefile @@ -0,0 +1,56 @@ +.PHONY: all format lint test tests integration_tests docker_tests help extended_tests + +# Default target executed when no arguments are given to make. +all: help + +# Define a variable for the test file path. +TEST_FILE ?= tests/unit_tests/ +integration_test integration_tests: TEST_FILE = tests/integration_tests/ +test tests integration_test integration_tests: + poetry run pytest $(TEST_FILE) + + +###################### +# LINTING AND FORMATTING +###################### + +# Define a variable for Python and notebook files. +PYTHON_FILES=. +MYPY_CACHE=.mypy_cache +lint format: PYTHON_FILES=. +lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/partners/ai21 --name-only --diff-filter=d master | grep -E '\.py$$|\.ipynb$$') +lint_package: PYTHON_FILES=langchain_ai21 +lint_tests: PYTHON_FILES=tests +lint_tests: MYPY_CACHE=.mypy_cache_test + +lint lint_diff lint_package lint_tests: + poetry run ruff . + poetry run ruff format $(PYTHON_FILES) --diff + poetry run ruff --select I $(PYTHON_FILES) + mkdir $(MYPY_CACHE); poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) + +format format_diff: + poetry run ruff format $(PYTHON_FILES) + poetry run ruff --select I --fix $(PYTHON_FILES) + +spell_check: + poetry run codespell --toml pyproject.toml + +spell_fix: + poetry run codespell --toml pyproject.toml -w + +check_imports: $(shell find langchain_ai21 -name '*.py') + poetry run python ./scripts/check_imports.py $^ + +###################### +# HELP +###################### + +help: + @echo '----' + @echo 'check_imports - check imports' + @echo 'format - run code formatters' + @echo 'lint - run linters' + @echo 'test - run unit tests' + @echo 'tests - run unit tests' + @echo 'test TEST_FILE= - run all tests in file' diff --git a/libs/partners/ai21/README.md b/libs/partners/ai21/README.md new file mode 100644 index 0000000000000..7b563969e94df --- /dev/null +++ b/libs/partners/ai21/README.md @@ -0,0 +1,75 @@ +# langchain-ai21 + +This package contains the LangChain integrations for [AI21](https://docs.ai21.com/) through their [AI21](https://pypi.org/project/ai21/) SDK. 
+
+## Installation and Setup
+
+- Install the AI21 partner package
+```bash
+pip install langchain-ai21
+```
+- Get an AI21 API key and set it as an environment variable (`AI21_API_KEY`)
+
+
+## Chat Models
+
+This package contains the `ChatAI21` class, which is the recommended way to interface with AI21 Chat models.
+
+To use it, install the requirements and configure your environment:
+
+```bash
+export AI21_API_KEY=your-api-key
+```
+
+Then initialize:
+
+```python
+from langchain_core.messages import HumanMessage
+from langchain_ai21.chat_models import ChatAI21
+
+chat = ChatAI21(model="j2-ultra")
+messages = [HumanMessage(content="Hello from AI21")]
+chat.invoke(messages)
+```
+
+## LLMs
+You can use AI21's generative AI models as LangChain LLMs:
+
+```python
+from langchain.prompts import PromptTemplate
+from langchain_ai21 import AI21LLM
+
+llm = AI21LLM(model="j2-ultra")
+
+template = """Question: {question}
+
+Answer: Let's think step by step."""
+prompt = PromptTemplate.from_template(template)
+
+chain = prompt | llm
+
+question = "Which scientist discovered relativity?"
+print(chain.invoke({"question": question}))
+```
+
+## Embeddings
+
+You can use AI21's embedding models:
+
+### Query
+
+```python
+from langchain_ai21 import AI21Embeddings
+
+embeddings = AI21Embeddings()
+embeddings.embed_query("Hello! This is some query")
+```
+
+### Document
+
+```python
+from langchain_ai21 import AI21Embeddings
+
+embeddings = AI21Embeddings()
+embeddings.embed_documents(["Hello! This is document 1", "And this is document 2!"])
+```
diff --git a/libs/partners/ai21/langchain_ai21/__init__.py b/libs/partners/ai21/langchain_ai21/__init__.py
new file mode 100644
index 0000000000000..253de3778c91e
--- /dev/null
+++ b/libs/partners/ai21/langchain_ai21/__init__.py
@@ -0,0 +1,9 @@
+from langchain_ai21.chat_models import ChatAI21
+from langchain_ai21.embeddings import AI21Embeddings
+from langchain_ai21.llms import AI21LLM
+
+__all__ = [
+    "AI21LLM",
+    "ChatAI21",
+    "AI21Embeddings",
+]
diff --git a/libs/partners/ai21/langchain_ai21/ai21_base.py b/libs/partners/ai21/langchain_ai21/ai21_base.py
new file mode 100644
index 0000000000000..39c5ffbf1f054
--- /dev/null
+++ b/libs/partners/ai21/langchain_ai21/ai21_base.py
@@ -0,0 +1,48 @@
+import os
+from typing import Dict, Optional
+
+from ai21 import AI21Client
+from langchain_core.pydantic_v1 import BaseModel, Field, SecretStr, root_validator
+from langchain_core.utils import convert_to_secret_str
+
+_DEFAULT_TIMEOUT_SEC = 300
+
+
+class AI21Base(BaseModel):
+    class Config:
+        arbitrary_types_allowed = True
+
+    client: AI21Client = Field(default=None)
+    api_key: Optional[SecretStr] = None
+    api_host: Optional[str] = None
+    timeout_sec: Optional[float] = None
+    num_retries: Optional[int] = None
+
+    @root_validator()
+    def validate_environment(cls, values: Dict) -> Dict:
+        api_key = convert_to_secret_str(
+            values.get("api_key") or os.getenv("AI21_API_KEY") or ""
+        )
+        values["api_key"] = api_key
+
+        api_host = (
+            values.get("api_host")
+            or os.getenv("AI21_API_URL")
+            or "https://api.ai21.com"
+        )
+        values["api_host"] = api_host
+
+        timeout_sec = values.get("timeout_sec") or float(
+            os.getenv("AI21_TIMEOUT_SEC", _DEFAULT_TIMEOUT_SEC)
+        )
+        values["timeout_sec"] = timeout_sec
+
+        if values.get("client") is None:
+            values["client"] = AI21Client(
+                api_key=api_key.get_secret_value(),
+                api_host=api_host,
+                timeout_sec=None if timeout_sec is None else float(timeout_sec),
+                via="langchain",
+            )
+
+        return values
diff --git 
a/libs/partners/ai21/langchain_ai21/chat_models.py b/libs/partners/ai21/langchain_ai21/chat_models.py new file mode 100644 index 0000000000000..0839a493862e2 --- /dev/null +++ b/libs/partners/ai21/langchain_ai21/chat_models.py @@ -0,0 +1,171 @@ +import asyncio +from functools import partial +from typing import Any, List, Optional, Tuple, cast + +from ai21.models import ChatMessage, Penalty, RoleType +from langchain_core.callbacks import ( + AsyncCallbackManagerForLLMRun, + CallbackManagerForLLMRun, +) +from langchain_core.language_models.chat_models import BaseChatModel +from langchain_core.messages import ( + AIMessage, + BaseMessage, + HumanMessage, + SystemMessage, +) +from langchain_core.outputs import ChatGeneration, ChatResult + +from langchain_ai21.ai21_base import AI21Base + + +def _get_system_message_from_message(message: BaseMessage) -> str: + if not isinstance(message.content, str): + raise ValueError( + f"System Message must be of type str. Got {type(message.content)}" + ) + + return message.content + + +def _convert_messages_to_ai21_messages( + messages: List[BaseMessage], +) -> Tuple[Optional[str], List[ChatMessage]]: + system_message = None + converted_messages: List[ChatMessage] = [] + + for i, message in enumerate(messages): + if message.type == "system": + if i != 0: + raise ValueError("System message must be at beginning of message list.") + else: + system_message = _get_system_message_from_message(message) + else: + converted_message = _convert_message_to_ai21_message(message) + converted_messages.append(converted_message) + + return system_message, converted_messages + + +def _convert_message_to_ai21_message( + message: BaseMessage, +) -> ChatMessage: + content = cast(str, message.content) + + role = None + + if isinstance(message, HumanMessage): + role = RoleType.USER + elif isinstance(message, AIMessage): + role = RoleType.ASSISTANT + + if not role: + raise ValueError( + f"Could not resolve role type from message {message}. " + f"Only support {HumanMessage.__name__} and {AIMessage.__name__}." + ) + + return ChatMessage(role=role, text=content) + + +def _pop_system_messages(messages: List[BaseMessage]) -> List[SystemMessage]: + system_message_indexes = [ + i for i, message in enumerate(messages) if isinstance(message, SystemMessage) + ] + + return [cast(SystemMessage, messages.pop(i)) for i in system_message_indexes] + + +class ChatAI21(BaseChatModel, AI21Base): + """ChatAI21 chat model. + + Example: + .. code-block:: python + + from langchain_ai21 import ChatAI21 + + + model = ChatAI21() + """ + + model: str + """Model type you wish to interact with. 
+ You can view the options at https://github.com/AI21Labs/ai21-python?tab=readme-ov-file#model-types""" + num_results: int = 1 + """The number of responses to generate for a given prompt.""" + + max_tokens: int = 16 + """The maximum number of tokens to generate for each response.""" + + min_tokens: int = 0 + """The minimum number of tokens to generate for each response.""" + + temperature: float = 0.7 + """A value controlling the "creativity" of the model's responses.""" + + top_p: float = 1 + """A value controlling the diversity of the model's responses.""" + + top_k_return: int = 0 + """The number of top-scoring tokens to consider for each generation step.""" + + frequency_penalty: Optional[Penalty] = None + """A penalty applied to tokens that are frequently generated.""" + + presence_penalty: Optional[Penalty] = None + """ A penalty applied to tokens that are already present in the prompt.""" + + count_penalty: Optional[Penalty] = None + """A penalty applied to tokens based on their frequency + in the generated responses.""" + + class Config: + """Configuration for this pydantic object.""" + + arbitrary_types_allowed = True + + @property + def _llm_type(self) -> str: + """Return type of chat model.""" + return "chat-ai21" + + def _generate( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> ChatResult: + system, ai21_messages = _convert_messages_to_ai21_messages(messages) + + response = self.client.chat.create( + model=self.model, + messages=ai21_messages, + system=system or "", + num_results=self.num_results, + temperature=self.temperature, + max_tokens=self.max_tokens, + min_tokens=self.min_tokens, + top_p=self.top_p, + top_k_return=self.top_k_return, + stop_sequences=stop, + frequency_penalty=self.frequency_penalty, + presence_penalty=self.presence_penalty, + count_penalty=self.count_penalty, + **kwargs, + ) + + outputs = response.outputs + message = AIMessage(content=outputs[0].text) + return ChatResult(generations=[ChatGeneration(message=message)]) + + async def _agenerate( + self, + messages: List[BaseMessage], + stop: Optional[List[str]] = None, + run_manager: Optional[AsyncCallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> ChatResult: + return await asyncio.get_running_loop().run_in_executor( + None, partial(self._generate, **kwargs), messages, stop, run_manager + ) diff --git a/libs/partners/ai21/langchain_ai21/embeddings.py b/libs/partners/ai21/langchain_ai21/embeddings.py new file mode 100644 index 0000000000000..59fad67b5c876 --- /dev/null +++ b/libs/partners/ai21/langchain_ai21/embeddings.py @@ -0,0 +1,41 @@ +from typing import Any, List + +from ai21.models import EmbedType +from langchain_core.embeddings import Embeddings + +from langchain_ai21.ai21_base import AI21Base + + +class AI21Embeddings(Embeddings, AI21Base): + """AI21 Embeddings embedding model. + To use, you should have the 'AI21_API_KEY' environment variable set + or pass as a named parameter to the constructor. + + Example: + .. 
code-block:: python + + from langchain_ai21 import AI21Embeddings + + embeddings = AI21Embeddings() + query_result = embeddings.embed_query("Hello embeddings world!") + """ + + def embed_documents(self, texts: List[str], **kwargs: Any) -> List[List[float]]: + """Embed search docs.""" + response = self.client.embed.create( + texts=texts, + type=EmbedType.SEGMENT, + **kwargs, + ) + + return [result.embedding for result in response.results] + + def embed_query(self, text: str, **kwargs: Any) -> List[float]: + """Embed query text.""" + response = self.client.embed.create( + texts=[text], + type=EmbedType.QUERY, + **kwargs, + ) + + return [result.embedding for result in response.results][0] diff --git a/libs/partners/ai21/langchain_ai21/llms.py b/libs/partners/ai21/langchain_ai21/llms.py new file mode 100644 index 0000000000000..27a8121bbe154 --- /dev/null +++ b/libs/partners/ai21/langchain_ai21/llms.py @@ -0,0 +1,142 @@ +import asyncio +from functools import partial +from typing import ( + Any, + List, + Optional, +) + +from ai21.models import CompletionsResponse, Penalty +from langchain_core.callbacks import ( + AsyncCallbackManagerForLLMRun, + CallbackManagerForLLMRun, +) +from langchain_core.language_models import BaseLLM +from langchain_core.outputs import Generation, LLMResult + +from langchain_ai21.ai21_base import AI21Base + + +class AI21LLM(BaseLLM, AI21Base): + """AI21LLM large language models. + + Example: + .. code-block:: python + + from langchain_ai21 import AI21LLM + + model = AI21LLM() + """ + + model: str + """Model type you wish to interact with. + You can view the options at https://github.com/AI21Labs/ai21-python?tab=readme-ov-file#model-types""" + + num_results: int = 1 + """The number of responses to generate for a given prompt.""" + + max_tokens: int = 16 + """The maximum number of tokens to generate for each response.""" + + min_tokens: int = 0 + """The minimum number of tokens to generate for each response.""" + + temperature: float = 0.7 + """A value controlling the "creativity" of the model's responses.""" + + top_p: float = 1 + """A value controlling the diversity of the model's responses.""" + + top_k_returns: int = 0 + """The number of top-scoring tokens to consider for each generation step.""" + + frequency_penalty: Optional[Penalty] = None + """A penalty applied to tokens that are frequently generated.""" + + presence_penalty: Optional[Penalty] = None + """ A penalty applied to tokens that are already present in the prompt.""" + + count_penalty: Optional[Penalty] = None + """A penalty applied to tokens based on their frequency + in the generated responses.""" + + custom_model: Optional[str] = None + epoch: Optional[int] = None + + class Config: + """Configuration for this pydantic object.""" + + allow_population_by_field_name = True + + @property + def _llm_type(self) -> str: + """Return type of LLM.""" + return "ai21-llm" + + def _generate( + self, + prompts: List[str], + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> LLMResult: + generations: List[List[Generation]] = [] + token_count = 0 + + for prompt in prompts: + response = self._invoke_completion( + prompt=prompt, model=self.model, stop_sequences=stop, **kwargs + ) + generation = self._response_to_generation(response) + generations.append(generation) + token_count += self.client.count_tokens(prompt) + + llm_output = {"token_count": token_count, "model_name": self.model} + return LLMResult(generations=generations, llm_output=llm_output) + + 
async def _agenerate( + self, + prompts: List[str], + stop: Optional[List[str]] = None, + run_manager: Optional[AsyncCallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> LLMResult: + # Change implementation if integration natively supports async generation. + return await asyncio.get_running_loop().run_in_executor( + None, partial(self._generate, **kwargs), prompts, stop, run_manager + ) + + def _invoke_completion( + self, + prompt: str, + model: str, + stop_sequences: Optional[List[str]] = None, + **kwargs: Any, + ) -> CompletionsResponse: + return self.client.completion.create( + prompt=prompt, + model=model, + max_tokens=self.max_tokens, + num_results=self.num_results, + min_tokens=self.min_tokens, + temperature=self.temperature, + top_p=self.top_p, + top_k_return=self.top_k_returns, + custom_model=self.custom_model, + stop_sequences=stop_sequences, + frequency_penalty=self.frequency_penalty, + presence_penalty=self.presence_penalty, + count_penalty=self.count_penalty, + epoch=self.epoch, + ) + + def _response_to_generation( + self, response: CompletionsResponse + ) -> List[Generation]: + return [ + Generation( + text=completion.data.text, + generation_info=completion.to_dict(), + ) + for completion in response.completions + ] diff --git a/libs/partners/ai21/langchain_ai21/py.typed b/libs/partners/ai21/langchain_ai21/py.typed new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/ai21/poetry.lock b/libs/partners/ai21/poetry.lock new file mode 100644 index 0000000000000..49a59858f739b --- /dev/null +++ b/libs/partners/ai21/poetry.lock @@ -0,0 +1,975 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "ai21" +version = "2.0.0" +description = "" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "ai21-2.0.0-py3-none-any.whl", hash = "sha256:0f40b0fe9254b1ddface81681aa18b74635dbf318ca1aeab38c31b500ecd1fe5"}, + {file = "ai21-2.0.0.tar.gz", hash = "sha256:37ac1bda7c2584aafc05cfc69f3e43f7c46571aea29ff292a1f40950f4d9aa70"}, +] + +[package.dependencies] +ai21-tokenizer = ">=0.3.9,<0.4.0" +dataclasses-json = ">=0.6.3,<0.7.0" +requests = ">=2.31.0,<3.0.0" + +[package.extras] +aws = ["boto3 (>=1.28.82,<2.0.0)"] + +[[package]] +name = "ai21-tokenizer" +version = "0.3.11" +description = "" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "ai21_tokenizer-0.3.11-py3-none-any.whl", hash = "sha256:80d332c51cab3fa88f0fea7493240a6a5bc38fd24a3d0806d28731d8fc97691f"}, + {file = "ai21_tokenizer-0.3.11.tar.gz", hash = "sha256:ec11ce4e46d24f71f1c2756ad0de34e0adfd51b5bcd81b544aea13d6935ec905"}, +] + +[package.dependencies] +sentencepiece = ">=0.1.96,<0.2.0" + +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} + +[[package]] +name = "anyio" +version = "4.2.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.2.0-py3-none-any.whl", hash = 
"sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "certifi" +version = "2023.11.17" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "codespell" +version = "2.2.6" +description = "Codespell" +optional = false +python-versions = ">=3.8" +files = [ + {file = "codespell-2.2.6-py3-none-any.whl", hash = "sha256:9ee9a3e5df0990604013ac2a9f22fa8e57669c827124a2e961fe8a1da4cacc07"}, + {file = "codespell-2.2.6.tar.gz", hash = "sha256:a8c65d8eb3faa03deabab6b3bbe798bea72e1799c7e9e955d57eca4096abcff9"}, +] + +[package.extras] +dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] +hard-encoding-detection = ["chardet"] +toml = ["tomli"] +types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "dataclasses-json" +version = "0.6.3" +description = "Easily serialize dataclasses to and from JSON." 
+optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "dataclasses_json-0.6.3-py3-none-any.whl", hash = "sha256:4aeb343357997396f6bca1acae64e486c3a723d8f5c76301888abeccf0c45176"}, + {file = "dataclasses_json-0.6.3.tar.gz", hash = "sha256:35cb40aae824736fdf959801356641836365219cfe14caeb115c39136f775d2a"}, +] + +[package.dependencies] +marshmallow = ">=3.18.0,<4.0.0" +typing-inspect = ">=0.4.0,<1" + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "langchain-core" +version = "0.1.22" +description = "Building applications with LLMs through composability" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [] +develop = true + +[package.dependencies] +anyio = ">=3,<5" +jsonpatch = "^1.33" +langsmith = "^0.0.87" +packaging = "^23.2" +pydantic = ">=1,<3" 
+PyYAML = ">=5.3" +requests = "^2" +tenacity = "^8.1.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[package.source] +type = "directory" +url = "../../core" + +[[package]] +name = "langsmith" +version = "0.0.87" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, + {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, +] + +[package.dependencies] +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "marshmallow" +version = "3.20.2" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +optional = false +python-versions = ">=3.8" +files = [ + {file = "marshmallow-3.20.2-py3-none-any.whl", hash = "sha256:c21d4b98fee747c130e6bc8f45c4b3199ea66bc00c12ee1f639f0aeca034d5e9"}, + {file = "marshmallow-3.20.2.tar.gz", hash = "sha256:4c1daff273513dc5eb24b219a8035559dc573c8f322558ef85f5438ddd1236dd"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["pre-commit (>=2.4,<4.0)", "pytest", "pytz", "simplejson", "tox"] +docs = ["alabaster (==0.7.15)", "autodocsumm (==0.2.12)", "sphinx (==7.2.6)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] +lint = ["pre-commit (>=2.4,<4.0)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "mypy" +version = "0.991" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, + {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, + {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, + {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"}, + {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"}, + {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"}, + {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"}, + {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"}, + {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"}, + {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"}, + {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"}, + {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"}, + {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"}, + {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"}, + {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"}, + {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"}, + {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"}, + {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"}, + {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"}, + {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"}, + {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"}, + {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"}, + {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"}, + {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"}, + {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"}, + {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"}, + {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"}, + {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"}, + {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, + {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, +] + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pydantic" +version = "2.5.3" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, + {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.14.6" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.14.6" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, + {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, + {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, + {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, + {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, + {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, + {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, + {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, + {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, + {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, + {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, + {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, + {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, + {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, + {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, + {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, + {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, + {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, + {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, + {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, + {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, + {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, + {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, + {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, + {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, + {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, 
+ {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, + {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, + {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, + {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, + {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, + {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, + {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, + {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, + {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, + {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, + {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, + {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, + {file = 
"pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, + {file = 
"pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, + {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.21.1" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, + {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = "pytest-mock" +version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "pytest-watcher" +version = "0.3.4" +description = "Automatically rerun 
your tests on file modifications" +optional = false +python-versions = ">=3.7.0,<4.0.0" +files = [ + {file = "pytest_watcher-0.3.4-py3-none-any.whl", hash = "sha256:edd2bd9c8a1fb14d48c9f4947234065eb9b4c1acedc0bf213b1f12501dfcffd3"}, + {file = "pytest_watcher-0.3.4.tar.gz", hash = "sha256:d39491ba15b589221bb9a78ef4bed3d5d1503aed08209b1a138aeb95b9117a18"}, +] + +[package.dependencies] +tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""} +watchdog = ">=2.0.0" + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = 
"PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "ruff" +version = "0.1.9" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.1.9-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e6a212f436122ac73df851f0cf006e0c6612fe6f9c864ed17ebefce0eff6a5fd"}, + {file = "ruff-0.1.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:28d920e319783d5303333630dae46ecc80b7ba294aeffedf946a02ac0b7cc3db"}, + {file = "ruff-0.1.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:104aa9b5e12cb755d9dce698ab1b97726b83012487af415a4512fedd38b1459e"}, + {file = "ruff-0.1.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e63bf5a4a91971082a4768a0aba9383c12392d0d6f1e2be2248c1f9054a20da"}, + {file = "ruff-0.1.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d0738917c203246f3e275b37006faa3aa96c828b284ebfe3e99a8cb413c8c4b"}, + {file = "ruff-0.1.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:69dac82d63a50df2ab0906d97a01549f814b16bc806deeac4f064ff95c47ddf5"}, + {file = "ruff-0.1.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2aec598fb65084e41a9c5d4b95726173768a62055aafb07b4eff976bac72a592"}, + {file = "ruff-0.1.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:744dfe4b35470fa3820d5fe45758aace6269c578f7ddc43d447868cfe5078bcb"}, + {file = "ruff-0.1.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:479ca4250cab30f9218b2e563adc362bd6ae6343df7c7b5a7865300a5156d5a6"}, + {file = "ruff-0.1.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:aa8344310f1ae79af9ccd6e4b32749e93cddc078f9b5ccd0e45bd76a6d2e8bb6"}, + {file = "ruff-0.1.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:837c739729394df98f342319f5136f33c65286b28b6b70a87c28f59354ec939b"}, + {file = "ruff-0.1.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e6837202c2859b9f22e43cb01992373c2dbfeae5c0c91ad691a4a2e725392464"}, + {file = "ruff-0.1.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:331aae2cd4a0554667ac683243b151c74bd60e78fb08c3c2a4ac05ee1e606a39"}, + {file = "ruff-0.1.9-py3-none-win32.whl", hash = "sha256:8151425a60878e66f23ad47da39265fc2fad42aed06fb0a01130e967a7a064f4"}, + {file = "ruff-0.1.9-py3-none-win_amd64.whl", hash = "sha256:c497d769164df522fdaf54c6eba93f397342fe4ca2123a2e014a5b8fc7df81c7"}, + {file = "ruff-0.1.9-py3-none-win_arm64.whl", hash = "sha256:0e17f53bcbb4fff8292dfd84cf72d767b5e146f009cccd40c2fad27641f8a7a9"}, + {file = "ruff-0.1.9.tar.gz", hash = "sha256:b041dee2734719ddbb4518f762c982f2e912e7f28b8ee4fe1dee0b15d1b6e800"}, +] + +[[package]] +name = "sentencepiece" +version = "0.1.99" +description = "SentencePiece python wrapper" +optional = false +python-versions = "*" +files = [ + {file = "sentencepiece-0.1.99-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0eb528e70571b7c02723e5804322469b82fe7ea418c96051d0286c0fa028db73"}, + {file = "sentencepiece-0.1.99-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77d7fafb2c4e4659cbdf303929503f37a26eabc4ff31d3a79bf1c5a1b338caa7"}, + {file = "sentencepiece-0.1.99-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be9cf5b9e404c245aeb3d3723c737ba7a8f5d4ba262ef233a431fa6c45f732a0"}, + {file = "sentencepiece-0.1.99-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baed1a26464998f9710d20e52607c29ffd4293e7c71c6a1f83f51ad0911ec12c"}, + {file = "sentencepiece-0.1.99-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9832f08bb372d4c8b567612f8eab9e36e268dff645f1c28f9f8e851be705f6d1"}, + {file 
= "sentencepiece-0.1.99-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:019e7535108e309dae2b253a75834fc3128240aa87c00eb80732078cdc182588"}, + {file = "sentencepiece-0.1.99-cp310-cp310-win32.whl", hash = "sha256:fa16a830416bb823fa2a52cbdd474d1f7f3bba527fd2304fb4b140dad31bb9bc"}, + {file = "sentencepiece-0.1.99-cp310-cp310-win_amd64.whl", hash = "sha256:14b0eccb7b641d4591c3e12ae44cab537d68352e4d3b6424944f0c447d2348d5"}, + {file = "sentencepiece-0.1.99-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6d3c56f24183a1e8bd61043ff2c58dfecdc68a5dd8955dc13bab83afd5f76b81"}, + {file = "sentencepiece-0.1.99-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ed6ea1819fd612c989999e44a51bf556d0ef6abfb553080b9be3d347e18bcfb7"}, + {file = "sentencepiece-0.1.99-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2a0260cd1fb7bd8b4d4f39dc2444a8d5fd4e0a0c4d5c899810ef1abf99b2d45"}, + {file = "sentencepiece-0.1.99-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a1abff4d1ff81c77cac3cc6fefa34fa4b8b371e5ee51cb7e8d1ebc996d05983"}, + {file = "sentencepiece-0.1.99-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:004e6a621d4bc88978eecb6ea7959264239a17b70f2cbc348033d8195c9808ec"}, + {file = "sentencepiece-0.1.99-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db361e03342c41680afae5807590bc88aa0e17cfd1a42696a160e4005fcda03b"}, + {file = "sentencepiece-0.1.99-cp311-cp311-win32.whl", hash = "sha256:2d95e19168875b70df62916eb55428a0cbcb834ac51d5a7e664eda74def9e1e0"}, + {file = "sentencepiece-0.1.99-cp311-cp311-win_amd64.whl", hash = "sha256:f90d73a6f81248a909f55d8e6ef56fec32d559e1e9af045f0b0322637cb8e5c7"}, + {file = "sentencepiece-0.1.99-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:62e24c81e74bd87a6e0d63c51beb6527e4c0add67e1a17bac18bcd2076afcfeb"}, + {file = "sentencepiece-0.1.99-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57efcc2d51caff20d9573567d9fd3f854d9efe613ed58a439c78c9f93101384a"}, + {file = "sentencepiece-0.1.99-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a904c46197993bd1e95b93a6e373dca2f170379d64441041e2e628ad4afb16f"}, + {file = "sentencepiece-0.1.99-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d89adf59854741c0d465f0e1525b388c0d174f611cc04af54153c5c4f36088c4"}, + {file = "sentencepiece-0.1.99-cp36-cp36m-win32.whl", hash = "sha256:47c378146928690d1bc106fdf0da768cebd03b65dd8405aa3dd88f9c81e35dba"}, + {file = "sentencepiece-0.1.99-cp36-cp36m-win_amd64.whl", hash = "sha256:9ba142e7a90dd6d823c44f9870abdad45e6c63958eb60fe44cca6828d3b69da2"}, + {file = "sentencepiece-0.1.99-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b7b1a9ae4d7c6f1f867e63370cca25cc17b6f4886729595b885ee07a58d3cec3"}, + {file = "sentencepiece-0.1.99-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0f644c9d4d35c096a538507b2163e6191512460035bf51358794a78515b74f7"}, + {file = "sentencepiece-0.1.99-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8843d23a0f686d85e569bd6dcd0dd0e0cbc03731e63497ca6d5bacd18df8b85"}, + {file = "sentencepiece-0.1.99-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33e6f690a1caebb4867a2e367afa1918ad35be257ecdb3455d2bbd787936f155"}, + {file = "sentencepiece-0.1.99-cp37-cp37m-win32.whl", hash = "sha256:8a321866c2f85da7beac74a824b4ad6ddc2a4c9bccd9382529506d48f744a12c"}, + {file = "sentencepiece-0.1.99-cp37-cp37m-win_amd64.whl", hash = 
"sha256:c42f753bcfb7661c122a15b20be7f684b61fc8592c89c870adf52382ea72262d"}, + {file = "sentencepiece-0.1.99-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:85b476406da69c70586f0bb682fcca4c9b40e5059814f2db92303ea4585c650c"}, + {file = "sentencepiece-0.1.99-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cfbcfe13c69d3f87b7fcd5da168df7290a6d006329be71f90ba4f56bc77f8561"}, + {file = "sentencepiece-0.1.99-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:445b0ec381af1cd4eef95243e7180c63d9c384443c16c4c47a28196bd1cda937"}, + {file = "sentencepiece-0.1.99-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6890ea0f2b4703f62d0bf27932e35808b1f679bdb05c7eeb3812b935ba02001"}, + {file = "sentencepiece-0.1.99-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb71af492b0eefbf9f2501bec97bcd043b6812ab000d119eaf4bd33f9e283d03"}, + {file = "sentencepiece-0.1.99-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27b866b5bd3ddd54166bbcbf5c8d7dd2e0b397fac8537991c7f544220b1f67bc"}, + {file = "sentencepiece-0.1.99-cp38-cp38-win32.whl", hash = "sha256:b133e8a499eac49c581c3c76e9bdd08c338cc1939e441fee6f92c0ccb5f1f8be"}, + {file = "sentencepiece-0.1.99-cp38-cp38-win_amd64.whl", hash = "sha256:0eaf3591dd0690a87f44f4df129cf8d05d8a4029b5b6709b489b8e27f9a9bcff"}, + {file = "sentencepiece-0.1.99-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38efeda9bbfb55052d482a009c6a37e52f42ebffcea9d3a98a61de7aee356a28"}, + {file = "sentencepiece-0.1.99-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6c030b081dc1e1bcc9fadc314b19b740715d3d566ad73a482da20d7d46fd444c"}, + {file = "sentencepiece-0.1.99-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84dbe53e02e4f8a2e45d2ac3e430d5c83182142658e25edd76539b7648928727"}, + {file = "sentencepiece-0.1.99-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b0f55d0a0ee1719b4b04221fe0c9f0c3461dc3dabd77a035fa2f4788eb3ef9a"}, + {file = "sentencepiece-0.1.99-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18e800f206cd235dc27dc749299e05853a4e4332e8d3dfd81bf13d0e5b9007d9"}, + {file = "sentencepiece-0.1.99-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae1c40cda8f9d5b0423cfa98542735c0235e7597d79caf318855cdf971b2280"}, + {file = "sentencepiece-0.1.99-cp39-cp39-win32.whl", hash = "sha256:c84ce33af12ca222d14a1cdd37bd76a69401e32bc68fe61c67ef6b59402f4ab8"}, + {file = "sentencepiece-0.1.99-cp39-cp39-win_amd64.whl", hash = "sha256:350e5c74d739973f1c9643edb80f7cc904dc948578bcb1d43c6f2b173e5d18dd"}, + {file = "sentencepiece-0.1.99.tar.gz", hash = "sha256:189c48f5cb2949288f97ccdb97f0473098d9c3dcf5a3d99d4eabe719ec27297f"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = 
"sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "syrupy" +version = "4.6.0" +description = "Pytest Snapshot Test Utility" +optional = false +python-versions = ">=3.8.1,<4" +files = [ + {file = "syrupy-4.6.0-py3-none-any.whl", hash = "sha256:747aae1bcf3cb3249e33b1e6d81097874d23615982d5686ebe637875b0775a1b"}, + {file = "syrupy-4.6.0.tar.gz", hash = "sha256:231b1f5d00f1f85048ba81676c79448076189c4aef4d33f21ae32f3b4c565a54"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<8.0.0" + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +description = "Runtime inspection utilities for typing module." +optional = false +python-versions = "*" +files = [ + {file = "typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f"}, + {file = "typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78"}, +] + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "urllib3" +version = "2.1.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, + {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "watchdog" +version = "3.0.0" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.7" +files = [ + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, + {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, + {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, + {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, + {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, + {file = 
"watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, + {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, + {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, + {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, + {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.8.1,<4.0" +content-hash = "95343e4ae1dd67887432aa1acd09e5f49cbc33c2ddcfdbad9552a90a92adda79" diff --git a/libs/partners/ai21/pyproject.toml b/libs/partners/ai21/pyproject.toml new file mode 100644 index 0000000000000..9274ceb033556 --- /dev/null +++ b/libs/partners/ai21/pyproject.toml @@ -0,0 +1,90 @@ +[tool.poetry] +name = "langchain-ai21" +version = "0.0.1" +description = "An integration package connecting AI21 and LangChain" +authors = [] +readme = "README.md" + +[tool.poetry.dependencies] +python = ">=3.8.1,<4.0" +langchain-core = "^0.1.22" +ai21 = "^2.0.0" + +[tool.poetry.group.test] +optional = true + +[tool.poetry.group.test.dependencies] +pytest = "^7.3.0" +freezegun = "^1.2.2" +pytest-mock = "^3.10.0" +syrupy = "^4.0.2" +pytest-watcher = "^0.3.4" +pytest-asyncio = "^0.21.1" +langchain-core = {path = "../../core", develop = true} + +[tool.poetry.group.codespell] +optional = true + +[tool.poetry.group.codespell.dependencies] +codespell = "^2.2.0" + +[tool.poetry.group.test_integration] +optional = true + +[tool.poetry.group.test_integration.dependencies] + +[tool.poetry.group.lint] +optional = true + +[tool.poetry.group.lint.dependencies] +ruff = "^0.1.5" + +[tool.poetry.group.typing.dependencies] +mypy = "^0.991" +langchain-core = {path = "../../core", develop = true} + +[tool.poetry.group.dev] +optional = true + +[tool.poetry.group.dev.dependencies] +langchain-core = {path = "../../core", develop = true} + +[tool.ruff] +select = [ + "E", # pycodestyle + "F", # pyflakes + "I", # isort +] + +[tool.mypy] +disallow_untyped_defs = "True" + +[tool.coverage.run] +omit = [ + "tests/*", +] + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +# --strict-markers will raise errors on unknown marks. +# https://docs.pytest.org/en/7.1.x/how-to/mark.html#raising-errors-on-unknown-marks +# +# https://docs.pytest.org/en/7.1.x/reference/reference.html +# --strict-config any warnings encountered while parsing the `pytest` +# section of the configuration file raise errors. +# +# https://github.com/tophat/syrupy +# --snapshot-warn-unused Prints a warning on unused snapshots rather than fail the test suite. +addopts = "--snapshot-warn-unused --strict-markers --strict-config --durations=5" +# Registering custom markers. 
+# https://docs.pytest.org/en/7.1.x/example/markers.html#registering-markers +markers = [ + "requires: mark tests as requiring a specific library", + "asyncio: mark tests as requiring asyncio", + "compile: mark placeholder test used to compile integration tests without running them", + "scheduled: mark tests to run in scheduled testing", +] +asyncio_mode = "auto" diff --git a/libs/partners/ai21/scripts/check_imports.py b/libs/partners/ai21/scripts/check_imports.py new file mode 100644 index 0000000000000..fd21a4975b7f0 --- /dev/null +++ b/libs/partners/ai21/scripts/check_imports.py @@ -0,0 +1,17 @@ +import sys +import traceback +from importlib.machinery import SourceFileLoader + +if __name__ == "__main__": + files = sys.argv[1:] + has_failure = False + for file in files: + try: + SourceFileLoader("x", file).load_module() + except Exception: + has_failure = True + print(file) + traceback.print_exc() + print() + + sys.exit(1 if has_failure else 0) diff --git a/libs/partners/ai21/scripts/check_pydantic.sh b/libs/partners/ai21/scripts/check_pydantic.sh new file mode 100755 index 0000000000000..06b5bb81ae236 --- /dev/null +++ b/libs/partners/ai21/scripts/check_pydantic.sh @@ -0,0 +1,27 @@ +#!/bin/bash +# +# This script searches for lines starting with "import pydantic" or "from pydantic" +# in tracked files within a Git repository. +# +# Usage: ./scripts/check_pydantic.sh /path/to/repository + +# Check if a path argument is provided +if [ $# -ne 1 ]; then + echo "Usage: $0 /path/to/repository" + exit 1 +fi + +repository_path="$1" + +# Search for lines matching the pattern within the specified repository +result=$(git -C "$repository_path" grep -E '^import pydantic|^from pydantic') + +# Check if any matching lines were found +if [ -n "$result" ]; then + echo "ERROR: The following lines need to be updated:" + echo "$result" + echo "Please replace the code with an import from langchain_core.pydantic_v1." + echo "For example, replace 'from pydantic import BaseModel'" + echo "with 'from langchain_core.pydantic_v1 import BaseModel'" + exit 1 +fi diff --git a/libs/partners/ai21/scripts/lint_imports.sh b/libs/partners/ai21/scripts/lint_imports.sh new file mode 100755 index 0000000000000..695613c7ba8fd --- /dev/null +++ b/libs/partners/ai21/scripts/lint_imports.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +set -eu + +# Initialize a variable to keep track of errors +errors=0 + +# make sure not importing from langchain or langchain_experimental +git --no-pager grep '^from langchain\.' . && errors=$((errors+1)) +git --no-pager grep '^from langchain_experimental\.' .
&& errors=$((errors+1)) + +# Decide on an exit status based on the errors +if [ "$errors" -gt 0 ]; then + exit 1 +else + exit 0 +fi diff --git a/libs/partners/ai21/tests/__init__.py b/libs/partners/ai21/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/ai21/tests/integration_tests/__init__.py b/libs/partners/ai21/tests/integration_tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/ai21/tests/integration_tests/test_chat_models.py b/libs/partners/ai21/tests/integration_tests/test_chat_models.py new file mode 100644 index 0000000000000..37efd41e520c4 --- /dev/null +++ b/libs/partners/ai21/tests/integration_tests/test_chat_models.py @@ -0,0 +1,43 @@ +"""Test ChatAI21 chat model.""" +from langchain_core.messages import HumanMessage +from langchain_core.outputs import ChatGeneration + +from langchain_ai21.chat_models import ChatAI21 + + +def test_invoke() -> None: + """Test invoke tokens from AI21.""" + llm = ChatAI21(model="j2-ultra") + + result = llm.invoke("I'm Pickle Rick", config=dict(tags=["foo"])) + assert isinstance(result.content, str) + + +def test_generation() -> None: + """Test generation from AI21.""" + llm = ChatAI21(model="j2-ultra") + message = HumanMessage(content="Hello") + + result = llm.generate([[message], [message]], config=dict(tags=["foo"])) + + for generations in result.generations: + assert len(generations) == 1 + for generation in generations: + assert isinstance(generation, ChatGeneration) + assert isinstance(generation.text, str) + assert generation.text == generation.message.content + + +async def test_ageneration() -> None: + """Test async generation from AI21.""" + llm = ChatAI21(model="j2-ultra") + message = HumanMessage(content="Hello") + + result = await llm.agenerate([[message], [message]], config=dict(tags=["foo"])) + + for generations in result.generations: + assert len(generations) == 1 + for generation in generations: + assert isinstance(generation, ChatGeneration) + assert isinstance(generation.text, str) + assert generation.text == generation.message.content diff --git a/libs/partners/ai21/tests/integration_tests/test_compile.py b/libs/partners/ai21/tests/integration_tests/test_compile.py new file mode 100644 index 0000000000000..33ecccdfa0fbd --- /dev/null +++ b/libs/partners/ai21/tests/integration_tests/test_compile.py @@ -0,0 +1,7 @@ +import pytest + + +@pytest.mark.compile +def test_placeholder() -> None: + """Used for compiling integration tests without running any real tests.""" + pass diff --git a/libs/partners/ai21/tests/integration_tests/test_embeddings.py b/libs/partners/ai21/tests/integration_tests/test_embeddings.py new file mode 100644 index 0000000000000..8434234e56a4b --- /dev/null +++ b/libs/partners/ai21/tests/integration_tests/test_embeddings.py @@ -0,0 +1,19 @@ +"""Test AI21 embeddings.""" +from langchain_ai21.embeddings import AI21Embeddings + + +def test_langchain_ai21_embedding_documents() -> None: + """Test AI21 embeddings.""" + documents = ["foo bar"] + embedding = AI21Embeddings() + output = embedding.embed_documents(documents) + assert len(output) == 1 + assert len(output[0]) > 0 + + +def test_langchain_ai21_embedding_query() -> None: + """Test AI21 embeddings.""" + document = "foo bar" + embedding = AI21Embeddings() + output = embedding.embed_query(document) + assert len(output) > 0 diff --git a/libs/partners/ai21/tests/integration_tests/test_llms.py b/libs/partners/ai21/tests/integration_tests/test_llms.py new file mode 100644
index 0000000000000..fe4a812552723 --- /dev/null +++ b/libs/partners/ai21/tests/integration_tests/test_llms.py @@ -0,0 +1,103 @@ +"""Test AI21LLM llm.""" + + +from langchain_ai21.llms import AI21LLM + + +def _generate_llm() -> AI21LLM: + """ + Build an AI21LLM configured with non-default parameters for testing. + """ + return AI21LLM( + model="j2-ultra", + max_tokens=2, # Use fewer tokens for a faster response + temperature=0, # for a consistent response + epoch=1, + ) + + +def test_stream() -> None: + """Test streaming tokens from AI21.""" + llm = AI21LLM( + model="j2-ultra", + ) + + for token in llm.stream("I'm Pickle Rick"): + assert isinstance(token, str) + + +async def test_abatch() -> None: + """Test abatch tokens from AI21LLM.""" + llm = AI21LLM( + model="j2-ultra", + ) + + result = await llm.abatch(["I'm Pickle Rick", "I'm not Pickle Rick"]) + for token in result: + assert isinstance(token, str) + + +async def test_abatch_tags() -> None: + """Test abatch tokens from AI21LLM with tags.""" + llm = AI21LLM( + model="j2-ultra", + ) + + result = await llm.abatch( + ["I'm Pickle Rick", "I'm not Pickle Rick"], config={"tags": ["foo"]} + ) + for token in result: + assert isinstance(token, str) + + +def test_batch() -> None: + """Test batch tokens from AI21LLM.""" + llm = AI21LLM( + model="j2-ultra", + ) + + result = llm.batch(["I'm Pickle Rick", "I'm not Pickle Rick"]) + for token in result: + assert isinstance(token, str) + + +async def test_ainvoke() -> None: + """Test invoke tokens from AI21LLM.""" + llm = AI21LLM( + model="j2-ultra", + ) + + result = await llm.ainvoke("I'm Pickle Rick", config={"tags": ["foo"]}) + assert isinstance(result, str) + + +def test_invoke() -> None: + """Test invoke tokens from AI21LLM.""" + llm = AI21LLM( + model="j2-ultra", + ) + + result = llm.invoke("I'm Pickle Rick", config=dict(tags=["foo"])) + assert isinstance(result, str) + + +def test__generate() -> None: + llm = _generate_llm() + llm_result = llm.generate( + prompts=["Hey there, my name is Pickle Rick. What is your name?"], + stop=["##"], + ) + + assert len(llm_result.generations) > 0 + assert llm_result.llm_output["token_count"] != 0 # type: ignore + + +async def test__agenerate() -> None: + llm = _generate_llm() + llm_result = await llm.agenerate( + prompts=["Hey there, my name is Pickle Rick.
What is your name?"], + stop=["##"], + ) + + assert len(llm_result.generations) > 0 + assert llm_result.llm_output["token_count"] != 0 # type: ignore diff --git a/libs/partners/ai21/tests/unit_tests/__init__.py b/libs/partners/ai21/tests/unit_tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/ai21/tests/unit_tests/conftest.py b/libs/partners/ai21/tests/unit_tests/conftest.py new file mode 100644 index 0000000000000..43545eaf02e38 --- /dev/null +++ b/libs/partners/ai21/tests/unit_tests/conftest.py @@ -0,0 +1,91 @@ +import os +from contextlib import contextmanager +from typing import Generator +from unittest.mock import Mock + +import pytest +from ai21 import AI21Client +from ai21.models import ( + ChatOutput, + ChatResponse, + Completion, + CompletionData, + CompletionFinishReason, + CompletionsResponse, + FinishReason, + Penalty, + RoleType, +) +from pytest_mock import MockerFixture + +DUMMY_API_KEY = "test_api_key" + + +BASIC_EXAMPLE_LLM_PARAMETERS = { + "num_results": 3, + "max_tokens": 20, + "min_tokens": 10, + "temperature": 0.5, + "top_p": 0.5, + "top_k_return": 0, + "frequency_penalty": Penalty(scale=0.2, apply_to_numbers=True), + "presence_penalty": Penalty(scale=0.2, apply_to_stopwords=True), + "count_penalty": Penalty( + scale=0.2, apply_to_punctuation=True, apply_to_emojis=True + ), +} + + +@pytest.fixture +def mocked_completion_response(mocker: MockerFixture) -> Mock: + mocked_response = mocker.MagicMock(spec=CompletionsResponse) + mocked_response.prompt = "this is a test prompt" + mocked_response.completions = [ + Completion( + data=CompletionData(text="test", tokens=[]), + finish_reason=CompletionFinishReason(reason=None, length=None), + ) + ] + return mocked_response + + +@pytest.fixture +def mock_client_with_completion( + mocker: MockerFixture, mocked_completion_response: Mock +) -> Mock: + mock_client = mocker.MagicMock(spec=AI21Client) + mock_client.completion = mocker.MagicMock() + mock_client.completion.create.side_effect = [ + mocked_completion_response, + mocked_completion_response, + ] + mock_client.count_tokens.side_effect = [10, 20] + + return mock_client + + +@pytest.fixture +def mock_client_with_chat(mocker: MockerFixture) -> Mock: + mock_client = mocker.MagicMock(spec=AI21Client) + mock_client.chat = mocker.MagicMock() + + output = ChatOutput( + text="Hello Pickle Rick!", + role=RoleType.ASSISTANT, + finish_reason=FinishReason(reason="testing"), + ) + mock_client.chat.create.return_value = ChatResponse(outputs=[output]) + + return mock_client + + +@contextmanager +def temporarily_unset_api_key() -> Generator: + """ + Unset and set environment key for testing purpose for when an API KEY is not set + """ + api_key = os.environ.pop("API_KEY", None) + yield + + if api_key is not None: + os.environ["API_KEY"] = api_key diff --git a/libs/partners/ai21/tests/unit_tests/test_chat_models.py b/libs/partners/ai21/tests/unit_tests/test_chat_models.py new file mode 100644 index 0000000000000..83eb06bc45793 --- /dev/null +++ b/libs/partners/ai21/tests/unit_tests/test_chat_models.py @@ -0,0 +1,239 @@ +"""Test chat model integration.""" +from typing import List, Optional +from unittest.mock import Mock, call + +import pytest +from ai21 import MissingApiKeyError +from ai21.models import ChatMessage, Penalty, RoleType +from langchain_core.messages import ( + AIMessage, + BaseMessage, + HumanMessage, + SystemMessage, +) +from langchain_core.messages import ( + ChatMessage as LangChainChatMessage, +) + +from 
langchain_ai21.chat_models import ( + ChatAI21, + _convert_message_to_ai21_message, + _convert_messages_to_ai21_messages, +) +from tests.unit_tests.conftest import ( + BASIC_EXAMPLE_LLM_PARAMETERS, + DUMMY_API_KEY, + temporarily_unset_api_key, +) + + +def test_initialization__when_no_api_key__should_raise_exception() -> None: + """Test integration initialization.""" + with temporarily_unset_api_key(): + with pytest.raises(MissingApiKeyError): + ChatAI21(model="j2-ultra") + + +def test_initialization__when_default_parameters_in_init() -> None: + """Test chat model initialization.""" + ChatAI21(api_key=DUMMY_API_KEY, model="j2-ultra") + + +def test_initialization__when_custom_parameters_in_init() -> None: + model = "j2-mid" + num_results = 1 + max_tokens = 10 + min_tokens = 20 + temperature = 0.1 + top_p = 0.1 + top_k_returns = 0 + frequency_penalty = Penalty(scale=0.2, apply_to_numbers=True) + presence_penalty = Penalty(scale=0.2, apply_to_stopwords=True) + count_penalty = Penalty(scale=0.2, apply_to_punctuation=True, apply_to_emojis=True) + + llm = ChatAI21( + api_key=DUMMY_API_KEY, + model=model, + num_results=num_results, + max_tokens=max_tokens, + min_tokens=min_tokens, + temperature=temperature, + top_p=top_p, + top_k_returns=top_k_returns, + frequency_penalty=frequency_penalty, + presence_penalty=presence_penalty, + count_penalty=count_penalty, + ) + assert llm.model == model + assert llm.num_results == num_results + assert llm.max_tokens == max_tokens + assert llm.min_tokens == min_tokens + assert llm.temperature == temperature + assert llm.top_p == top_p + assert llm.top_k_return == top_k_returns + assert llm.frequency_penalty == frequency_penalty + assert llm.presence_penalty == presence_penalty + assert llm.count_penalty == count_penalty + + +@pytest.mark.parametrize( + ids=[ + "when_human_message", + "when_ai_message", + ], + argnames=["message", "expected_ai21_message"], + argvalues=[ + ( + HumanMessage(content="Human Message Content"), + ChatMessage(role=RoleType.USER, text="Human Message Content"), + ), + ( + AIMessage(content="AI Message Content"), + ChatMessage(role=RoleType.ASSISTANT, text="AI Message Content"), + ), + ], +) +def test_convert_message_to_ai21_message( + message: BaseMessage, expected_ai21_message: ChatMessage +) -> None: + ai21_message = _convert_message_to_ai21_message(message) + assert ai21_message == expected_ai21_message + + +@pytest.mark.parametrize( + ids=[ + "when_system_message", + "when_langchain_chat_message", + ], + argnames=["message"], + argvalues=[ + (SystemMessage(content="System Message Content"),), + (LangChainChatMessage(content="Chat Message Content", role="human"),), + ], +) +def test_convert_message_to_ai21_message__when_invalid_role__should_raise_exception( + message: BaseMessage, +) -> None: + with pytest.raises(ValueError) as e: + _convert_message_to_ai21_message(message) + assert e.value.args[0] == ( + f"Could not resolve role type from message {message}. " + f"Only support {HumanMessage.__name__} and {AIMessage.__name__}."
+ ) + + +@pytest.mark.parametrize( + ids=[ + "when_all_messages_are_human_messages__should_return_system_none", + "when_first_message_is_system__should_return_system", + ], + argnames=["messages", "expected_system", "expected_messages"], + argvalues=[ + ( + [ + HumanMessage(content="Human Message Content 1"), + HumanMessage(content="Human Message Content 2"), + ], + None, + [ + ChatMessage(role=RoleType.USER, text="Human Message Content 1"), + ChatMessage(role=RoleType.USER, text="Human Message Content 2"), + ], + ), + ( + [ + SystemMessage(content="System Message Content 1"), + HumanMessage(content="Human Message Content 1"), + ], + "System Message Content 1", + [ + ChatMessage(role=RoleType.USER, text="Human Message Content 1"), + ], + ), + ], +) +def test_convert_messages( + messages: List[BaseMessage], + expected_system: Optional[str], + expected_messages: List[ChatMessage], +) -> None: + system, ai21_messages = _convert_messages_to_ai21_messages(messages) + assert ai21_messages == expected_messages + assert system == expected_system + + +def test_convert_messages_when_system_is_not_first__should_raise_value_error() -> None: + messages = [ + HumanMessage(content="Human Message Content 1"), + SystemMessage(content="System Message Content 1"), + ] + with pytest.raises(ValueError): + _convert_messages_to_ai21_messages(messages) + + +def test_invoke(mock_client_with_chat: Mock) -> None: + chat_input = "I'm Pickle Rick" + + llm = ChatAI21( + model="j2-ultra", + api_key=DUMMY_API_KEY, + client=mock_client_with_chat, + **BASIC_EXAMPLE_LLM_PARAMETERS, + ) + llm.invoke(input=chat_input, config=dict(tags=["foo"])) + + mock_client_with_chat.chat.create.assert_called_once_with( + model="j2-ultra", + messages=[ChatMessage(role=RoleType.USER, text=chat_input)], + system="", + stop_sequences=None, + **BASIC_EXAMPLE_LLM_PARAMETERS, + ) + + +def test_generate(mock_client_with_chat: Mock) -> None: + messages0 = [ + HumanMessage(content="I'm Pickle Rick"), + AIMessage(content="Hello Pickle Rick! 
I am your AI Assistant"), + HumanMessage(content="Nice to meet you."), + ] + messages1 = [ + SystemMessage(content="system message"), + HumanMessage(content="What is 1 + 1"), + ] + llm = ChatAI21( + model="j2-ultra", + client=mock_client_with_chat, + **BASIC_EXAMPLE_LLM_PARAMETERS, + ) + + llm.generate(messages=[messages0, messages1]) + mock_client_with_chat.chat.create.assert_has_calls( + [ + call( + model="j2-ultra", + messages=[ + ChatMessage( + role=RoleType.USER, + text=str(messages0[0].content), + ), + ChatMessage( + role=RoleType.ASSISTANT, text=str(messages0[1].content) + ), + ChatMessage(role=RoleType.USER, text=str(messages0[2].content)), + ], + system="", + stop_sequences=None, + **BASIC_EXAMPLE_LLM_PARAMETERS, + ), + call( + model="j2-ultra", + messages=[ + ChatMessage(role=RoleType.USER, text=str(messages1[1].content)), + ], + system="system message", + stop_sequences=None, + **BASIC_EXAMPLE_LLM_PARAMETERS, + ), + ] + ) diff --git a/libs/partners/ai21/tests/unit_tests/test_embeddings.py b/libs/partners/ai21/tests/unit_tests/test_embeddings.py new file mode 100644 index 0000000000000..a366b32dd331f --- /dev/null +++ b/libs/partners/ai21/tests/unit_tests/test_embeddings.py @@ -0,0 +1,67 @@ +"""Test embedding model integration.""" +from unittest.mock import Mock + +import pytest +from ai21 import AI21Client, MissingApiKeyError +from ai21.models import EmbedResponse, EmbedResult, EmbedType +from pytest_mock import MockerFixture + +from langchain_ai21.embeddings import AI21Embeddings +from tests.unit_tests.conftest import DUMMY_API_KEY, temporarily_unset_api_key + +_EXAMPLE_EMBEDDING_0 = [1.0, 2.0, 3.0] +_EXAMPLE_EMBEDDING_1 = [4.0, 5.0, 6.0] +_EXAMPLE_EMBEDDING_2 = [7.0, 8.0, 9.0] + +_EXAMPLE_EMBEDDING_RESPONSE = EmbedResponse( + results=[ + EmbedResult(_EXAMPLE_EMBEDDING_0), + EmbedResult(_EXAMPLE_EMBEDDING_1), + EmbedResult(_EXAMPLE_EMBEDDING_2), + ], + id="test_id", +) + + +def test_initialization__when_no_api_key__should_raise_exception() -> None: + """Test integration initialization.""" + with temporarily_unset_api_key(): + with pytest.raises(MissingApiKeyError): + AI21Embeddings() + + +@pytest.fixture +def mock_client_with_embeddings(mocker: MockerFixture) -> Mock: + mock_client = mocker.MagicMock(spec=AI21Client) + mock_client.embed = mocker.MagicMock() + mock_client.embed.create.return_value = _EXAMPLE_EMBEDDING_RESPONSE + + return mock_client + + +def test_embed_query(mock_client_with_embeddings: Mock) -> None: + llm = AI21Embeddings(client=mock_client_with_embeddings, api_key=DUMMY_API_KEY) + + text = "Hello embeddings world!" 
+ response = llm.embed_query(text=text) + assert response == _EXAMPLE_EMBEDDING_0 + mock_client_with_embeddings.embed.create.assert_called_once_with( + texts=[text], + type=EmbedType.QUERY, + ) + + +def test_embed_documents(mock_client_with_embeddings: Mock) -> None: + llm = AI21Embeddings(client=mock_client_with_embeddings, api_key=DUMMY_API_KEY) + + texts = ["Hello embeddings world!", "Some other text", "Some more text"] + response = llm.embed_documents(texts=texts) + assert response == [ + _EXAMPLE_EMBEDDING_0, + _EXAMPLE_EMBEDDING_1, + _EXAMPLE_EMBEDDING_2, + ] + mock_client_with_embeddings.embed.create.assert_called_once_with( + texts=texts, + type=EmbedType.SEGMENT, + ) diff --git a/libs/partners/ai21/tests/unit_tests/test_imports.py b/libs/partners/ai21/tests/unit_tests/test_imports.py new file mode 100644 index 0000000000000..28b11651a7325 --- /dev/null +++ b/libs/partners/ai21/tests/unit_tests/test_imports.py @@ -0,0 +1,11 @@ +from langchain_ai21 import __all__ + +EXPECTED_ALL = [ + "AI21LLM", + "ChatAI21", + "AI21Embeddings", +] + + +def test_all_imports() -> None: + assert sorted(EXPECTED_ALL) == sorted(__all__) diff --git a/libs/partners/ai21/tests/unit_tests/test_llms.py b/libs/partners/ai21/tests/unit_tests/test_llms.py new file mode 100644 index 0000000000000..a82240bea5d55 --- /dev/null +++ b/libs/partners/ai21/tests/unit_tests/test_llms.py @@ -0,0 +1,107 @@ +"""Test AI21 Chat API wrapper.""" +from unittest.mock import Mock, call + +import pytest +from ai21 import MissingApiKeyError +from ai21.models import ( + Penalty, +) + +from langchain_ai21 import AI21LLM +from tests.unit_tests.conftest import ( + BASIC_EXAMPLE_LLM_PARAMETERS, + DUMMY_API_KEY, + temporarily_unset_api_key, +) + + +def test_initialization__when_no_api_key__should_raise_exception() -> None: + """Test integration initialization.""" + with temporarily_unset_api_key(): + with pytest.raises(MissingApiKeyError): + AI21LLM( + model="j2-ultra", + ) + + +def test_initialization__when_default_parameters() -> None: + """Test integration initialization.""" + AI21LLM( + api_key=DUMMY_API_KEY, + model="j2-ultra", + ) + + +def test_initialization__when_custom_parameters_to_init() -> None: + """Test integration initialization.""" + AI21LLM( + api_key=DUMMY_API_KEY, + model="j2-mid", + num_results=2, + max_tokens=20, + min_tokens=10, + temperature=0.5, + top_p=0.5, + top_k_returns=0, + stop_sequences=["\n"], + frequency_penalty=Penalty(scale=0.2, apply_to_numbers=True), + presence_penalty=Penalty(scale=0.2, apply_to_stopwords=True), + count_penalty=Penalty( + scale=0.2, apply_to_punctuation=True, apply_to_emojis=True + ), + custom_model="test_model", + epoch=1, + ) + + +def test_generate(mock_client_with_completion: Mock) -> None: + # Setup test + prompt0 = "Hi, my name is what?" + prompt1 = "My name is who?" 
+ stop = ["\n"] + custom_model = "test_model" + epoch = 1 + + ai21 = AI21LLM( + model="j2-ultra", + api_key=DUMMY_API_KEY, + client=mock_client_with_completion, + custom_model=custom_model, + epoch=epoch, + **BASIC_EXAMPLE_LLM_PARAMETERS, + ) + + # Make call to testing function + ai21.generate( + [prompt0, prompt1], + stop=stop, + ) + + # Assertions + mock_client_with_completion.count_tokens.assert_has_calls( + [ + call(prompt0), + call(prompt1), + ], + ) + + mock_client_with_completion.completion.create.assert_has_calls( + [ + call( + prompt=prompt0, + model="j2-ultra", + custom_model=custom_model, + stop_sequences=stop, + epoch=epoch, + **BASIC_EXAMPLE_LLM_PARAMETERS, + ), + call( + prompt=prompt1, + model="j2-ultra", + custom_model=custom_model, + stop_sequences=stop, + epoch=epoch, + **BASIC_EXAMPLE_LLM_PARAMETERS, + ), + ] + ) diff --git a/libs/partners/anthropic/Makefile b/libs/partners/anthropic/Makefile index bf2408a100079..c3d56b11324a3 100644 --- a/libs/partners/anthropic/Makefile +++ b/libs/partners/anthropic/Makefile @@ -5,11 +5,9 @@ all: help # Define a variable for the test file path. TEST_FILE ?= tests/unit_tests/ +integration_test integration_tests: TEST_FILE=tests/integration_tests/ -test: - poetry run pytest $(TEST_FILE) - -tests: +test tests integration_test integration_tests: poetry run pytest $(TEST_FILE) diff --git a/libs/partners/anthropic/langchain_anthropic/chat_models.py b/libs/partners/anthropic/langchain_anthropic/chat_models.py index 812b829589e49..a7382c3cd1b30 100644 --- a/libs/partners/anthropic/langchain_anthropic/chat_models.py +++ b/libs/partners/anthropic/langchain_anthropic/chat_models.py @@ -51,7 +51,7 @@ def _format_messages(messages: List[BaseMessage]) -> Tuple[Optional[str], List[D class ChatAnthropicMessages(BaseChatModel): - """Beta ChatAnthropicMessages chat model. + """ChatAnthropicMessages chat model. Example: .. 
code-block:: python @@ -143,7 +143,7 @@ def _stream( **kwargs: Any, ) -> Iterator[ChatGenerationChunk]: params = self._format_params(messages=messages, stop=stop, **kwargs) - with self._client.beta.messages.stream(**params) as stream: + with self._client.messages.stream(**params) as stream: for text in stream.text_stream: yield ChatGenerationChunk(message=AIMessageChunk(content=text)) @@ -155,7 +155,7 @@ async def _astream( **kwargs: Any, ) -> AsyncIterator[ChatGenerationChunk]: params = self._format_params(messages=messages, stop=stop, **kwargs) - async with self._async_client.beta.messages.stream(**params) as stream: + async with self._async_client.messages.stream(**params) as stream: async for text in stream.text_stream: yield ChatGenerationChunk(message=AIMessageChunk(content=text)) @@ -167,7 +167,7 @@ def _generate( **kwargs: Any, ) -> ChatResult: params = self._format_params(messages=messages, stop=stop, **kwargs) - data = self._client.beta.messages.create(**params) + data = self._client.messages.create(**params) return ChatResult( generations=[ ChatGeneration(message=AIMessage(content=data.content[0].text)) @@ -183,7 +183,7 @@ async def _agenerate( **kwargs: Any, ) -> ChatResult: params = self._format_params(messages=messages, stop=stop, **kwargs) - data = await self._async_client.beta.messages.create(**params) + data = await self._async_client.messages.create(**params) return ChatResult( generations=[ ChatGeneration(message=AIMessage(content=data.content[0].text)) diff --git a/libs/partners/anthropic/poetry.lock b/libs/partners/anthropic/poetry.lock index f7f9d59919118..f06d19b59a0e3 100644 --- a/libs/partners/anthropic/poetry.lock +++ b/libs/partners/anthropic/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "annotated-types" @@ -16,13 +16,13 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} [[package]] name = "anthropic" -version = "0.8.0" +version = "0.16.0" description = "The official Python library for the anthropic API" optional = false python-versions = ">=3.7" files = [ - {file = "anthropic-0.8.0-py3-none-any.whl", hash = "sha256:4cd394b22d16013a8846bbea3ff6cbf83cfa6260af01b7142256204c6a8545da"}, - {file = "anthropic-0.8.0.tar.gz", hash = "sha256:5a5d7fd9149f28d6df875685f66ec68072edd0a9acf919648c041c6fc2da6f11"}, + {file = "anthropic-0.16.0-py3-none-any.whl", hash = "sha256:fb6657050c14c90ebc49df3510541a4f74a926f04598eaa2c4f5a3be772a95da"}, + {file = "anthropic-0.16.0.tar.gz", hash = "sha256:130730820d1b327f31a097f575e42721d7ef69c731728f4c2d7fa70cf3b7eee0"}, ] [package.dependencies] @@ -32,7 +32,11 @@ httpx = ">=0.23.0,<1" pydantic = ">=1.9.0,<3" sniffio = "*" tokenizers = ">=0.13.0" -typing-extensions = ">=4.5,<5" +typing-extensions = ">=4.7,<5" + +[package.extras] +bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"] +vertex = ["google-auth (>=2,<3)"] [[package]] name = "anyio" @@ -58,13 +62,13 @@ trio = ["trio (>=0.23)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -196,13 +200,13 @@ files = [ [[package]] name = "distro" -version = "1.8.0" +version = "1.9.0" description = "Distro - an OS platform information API" optional = false python-versions = ">=3.6" files = [ - {file = "distro-1.8.0-py3-none-any.whl", hash = "sha256:99522ca3e365cac527b44bde033f64c6945d90eb9f769703caaec52b09bbd3ff"}, - {file = "distro-1.8.0.tar.gz", hash = "sha256:02e111d1dc6a50abb8eed6bf31c3e48ed8b0830d1ea2a1b78c61765c2513fdd8"}, + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, ] [[package]] @@ -237,13 +241,13 @@ typing = ["typing-extensions (>=4.8)"] [[package]] name = "freezegun" -version = "1.3.1" +version = "1.4.0" description = "Let your Python tests travel through time" optional = false python-versions = ">=3.7" files = [ - {file = "freezegun-1.3.1-py3-none-any.whl", hash = "sha256:065e77a12624d05531afa87ade12a0b9bdb53495c4573893252a055b545ce3ea"}, - {file = "freezegun-1.3.1.tar.gz", hash = "sha256:48984397b3b58ef5dfc645d6a304b0060f612bcecfdaaf45ce8aff0077a6cb6a"}, + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, ] [package.dependencies] @@ -251,13 +255,13 @@ python-dateutil = ">=2.7" [[package]] name = "fsspec" -version = "2023.12.2" +version = "2024.2.0" description = "File-system specification" optional = false python-versions = ">=3.8" files = [ - {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"}, - {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, + {file = "fsspec-2024.2.0-py3-none-any.whl", hash = "sha256:817f969556fa5916bc682e02ca2045f96ff7f586d45110fcb76022063ad2c7d8"}, + {file = "fsspec-2024.2.0.tar.gz", hash = "sha256:b6ad1a679f760dda52b1168c859d01b7b80648ea6f7f7c7f5a8a91dc3f3ecb84"}, ] [package.extras] @@ -275,7 +279,7 @@ github = ["requests"] gs = ["gcsfs"] gui = ["panel"] hdfs = ["pyarrow (>=1)"] -http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] libarchive = ["libarchive-c"] oci = ["ocifs"] s3 = ["s3fs"] @@ -297,13 +301,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.2" +version = "1.0.3" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.2-py3-none-any.whl", hash = "sha256:096cc05bca73b8e459a1fc3dcf585148f63e534eae4339559c9b8a8d6399acc7"}, - {file = "httpcore-1.0.2.tar.gz", hash = "sha256:9fc092e4799b26174648e54b74ed5f683132a464e95643b226e00c2ed2fa6535"}, + {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, + {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, ] [package.dependencies] @@ -314,17 +318,17 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.23.0)"] +trio = ["trio (>=0.22.0,<0.24.0)"] [[package]] name = "httpx" -version = "0.25.2" +version = "0.26.0" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" files = [ - {file = "httpx-0.25.2-py3-none-any.whl", hash = "sha256:a05d3d052d9b2dfce0e3896636467f8a5342fb2b902c819428e1ac65413ca118"}, - {file = "httpx-0.25.2.tar.gz", hash = "sha256:8b8fcaa0c8ea7b05edd69a094e63a2094c4efcb48129fb757361bc423c0ad9e8"}, + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, ] [package.dependencies] @@ -342,13 +346,13 @@ socks = ["socksio (==1.*)"] [[package]] name = "huggingface-hub" -version = "0.19.4" +version = "0.20.3" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.19.4-py3-none-any.whl", hash = "sha256:dba013f779da16f14b606492828f3760600a1e1801432d09fe1c33e50b825bb5"}, - {file = "huggingface_hub-0.19.4.tar.gz", hash = "sha256:176a4fc355a851c17550e7619488f383189727eab209534d7cef2114dae77b22"}, + {file = "huggingface_hub-0.20.3-py3-none-any.whl", hash = "sha256:d988ae4f00d3e307b0c80c6a05ca6dbb7edba8bba3079f74cda7d9c2e562a7b6"}, + {file = "huggingface_hub-0.20.3.tar.gz", hash = "sha256:94e7f8e074475fbc67d6a71957b678e1b4a74ff1b64a644fd6cbb83da962d05d"}, ] [package.dependencies] @@ -361,15 +365,14 @@ tqdm = ">=4.42.1" typing-extensions = ">=3.7.4.3" [package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", 
"types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] -docs = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "hf-doc-builder", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)", "watchdog"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "mypy (==1.5.1)", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.1.3)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] inference = ["aiohttp", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)"] quality = ["mypy (==1.5.1)", "ruff (>=0.1.3)"] tensorflow = ["graphviz", "pydot", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "gradio", "jedi", "numpy", "pydantic (>1.1,<2.0)", "pydantic (>1.1,<3.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] torch = ["torch"] typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] @@ -422,7 +425,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.1.1" +version = "0.1.23" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -432,7 +435,7 @@ develop = true [package.dependencies] anyio = ">=3,<5" jsonpatch = "^1.33" -langsmith = "~0.0.63" +langsmith = "^0.0.87" packaging = "^23.2" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -448,13 +451,13 @@ url = "../../core" [[package]] name = "langsmith" -version = "0.0.71" +version = "0.0.87" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.71-py3-none-any.whl", hash = "sha256:dccc8522acbe64723ee6d54cb043f5c8d55512782b5eb3031add6691a00a28e1"}, - {file = "langsmith-0.0.71.tar.gz", hash = "sha256:1c6147563d7d658f5514299526a18d4e69e0e8ed59b99ad87abaf894eed1477a"}, + {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, + {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, ] [package.dependencies] @@ -535,13 +538,13 @@ files = [ [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -550,18 +553,18 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "2.5.2" +version = "2.6.1" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, - {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.5" +pydantic-core = "2.16.2" typing-extensions = ">=4.6.1" [package.extras] @@ -569,116 +572,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.5" +version = "2.16.2" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, - {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, - {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, - {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, - {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, - {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, - {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, - {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, - {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, - {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, - {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, - {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, - {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, - {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, - {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, - {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, - {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, - {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, - {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = 
"sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, - {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, - {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, - {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, - {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, - {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = 
"sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, - {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, - {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = 
"sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = 
"pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = 
"pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -686,13 +663,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -743,13 +720,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-watcher" -version = "0.3.4" +version = "0.3.5" description = "Automatically rerun your tests on file modifications" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "pytest_watcher-0.3.4-py3-none-any.whl", hash = "sha256:edd2bd9c8a1fb14d48c9f4947234065eb9b4c1acedc0bf213b1f12501dfcffd3"}, - {file = "pytest_watcher-0.3.4.tar.gz", hash = "sha256:d39491ba15b589221bb9a78ef4bed3d5d1503aed08209b1a138aeb95b9117a18"}, + {file = "pytest_watcher-0.3.5-py3-none-any.whl", hash = "sha256:af00ca52c7be22dc34c0fd3d7ffef99057207a73b05dc5161fe3b2fe91f58130"}, + {file = "pytest_watcher-0.3.5.tar.gz", hash = "sha256:8896152460ba2b1a8200c12117c6611008ec96c8b2d811f0a05ab8a82b043ff8"}, ] [package.dependencies] @@ -782,6 +759,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -789,8 +767,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -807,6 +793,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -814,6 +801,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -842,28 +830,28 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.1.8" +version = "0.1.15" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.8-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7de792582f6e490ae6aef36a58d85df9f7a0cfd1b0d4fe6b4fb51803a3ac96fa"}, - {file = "ruff-0.1.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c8e3255afd186c142eef4ec400d7826134f028a85da2146102a1172ecc7c3696"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff78a7583020da124dd0deb835ece1d87bb91762d40c514ee9b67a087940528b"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd8ee69b02e7bdefe1e5da2d5b6eaaddcf4f90859f00281b2333c0e3a0cc9cd6"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a05b0ddd7ea25495e4115a43125e8a7ebed0aa043c3d432de7e7d6e8e8cd6448"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e6f08ca730f4dc1b76b473bdf30b1b37d42da379202a059eae54ec7fc1fbcfed"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f35960b02df6b827c1b903091bb14f4b003f6cf102705efc4ce78132a0aa5af3"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d076717c67b34c162da7c1a5bda16ffc205e0e0072c03745275e7eab888719f"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6a21ab023124eafb7cef6d038f835cb1155cd5ea798edd8d9eb2f8b84be07d9"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ce697c463458555027dfb194cb96d26608abab920fa85213deb5edf26e026664"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:db6cedd9ffed55548ab313ad718bc34582d394e27a7875b4b952c2d29c001b26"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:05ffe9dbd278965271252704eddb97b4384bf58b971054d517decfbf8c523f05"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5daaeaf00ae3c1efec9742ff294b06c3a2a9db8d3db51ee4851c12ad385cda30"}, - {file = "ruff-0.1.8-py3-none-win32.whl", hash = "sha256:e49fbdfe257fa41e5c9e13c79b9e79a23a79bd0e40b9314bc53840f520c2c0b3"}, - {file = "ruff-0.1.8-py3-none-win_amd64.whl", hash = "sha256:f41f692f1691ad87f51708b823af4bb2c5c87c9248ddd3191c8f088e66ce590a"}, - {file = "ruff-0.1.8-py3-none-win_arm64.whl", hash = "sha256:aa8ee4f8440023b0a6c3707f76cadce8657553655dcbb5fc9b2f9bb9bee389f6"}, - {file = "ruff-0.1.8.tar.gz", hash = "sha256:f7ee467677467526cfe135eab86a40a0e8db43117936ac4f9b469ce9cdb3fb62"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = 
"sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, ] [[package]] @@ -890,17 +878,17 @@ files = [ [[package]] name = "syrupy" -version = "4.6.0" +version = "4.6.1" description = "Pytest Snapshot Test Utility" optional = false python-versions = ">=3.8.1,<4" files = [ - {file = "syrupy-4.6.0-py3-none-any.whl", hash = "sha256:747aae1bcf3cb3249e33b1e6d81097874d23615982d5686ebe637875b0775a1b"}, - {file = "syrupy-4.6.0.tar.gz", hash = "sha256:231b1f5d00f1f85048ba81676c79448076189c4aef4d33f21ae32f3b4c565a54"}, + {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, + {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, ] [package.dependencies] -pytest = ">=7.0.0,<8.0.0" +pytest = ">=7.0.0,<9.0.0" [[package]] name = "tenacity" @@ -918,109 +906,121 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] [[package]] name = "tokenizers" -version = "0.15.0" +version = "0.15.2" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "tokenizers-0.15.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = 
"sha256:cd3cd0299aaa312cd2988957598f80becd04d5a07338741eca076057a2b37d6e"}, - {file = "tokenizers-0.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a922c492c721744ee175f15b91704be2d305569d25f0547c77cd6c9f210f9dc"}, - {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:331dd786d02fc38698f835fff61c99480f98b73ce75a4c65bd110c9af5e4609a"}, - {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88dd0961c437d413ab027f8b115350c121d49902cfbadf08bb8f634b15fa1814"}, - {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6fdcc55339df7761cd52e1fbe8185d3b3963bc9e3f3545faa6c84f9e8818259a"}, - {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1480b0051d8ab5408e8e4db2dc832f7082ea24aa0722c427bde2418c6f3bd07"}, - {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9855e6c258918f9cf62792d4f6ddfa6c56dccd8c8118640f867f6393ecaf8bd7"}, - {file = "tokenizers-0.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de9529fe75efcd54ba8d516aa725e1851df9199f0669b665c55e90df08f5af86"}, - {file = "tokenizers-0.15.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8edcc90a36eab0705fe9121d6c77c6e42eeef25c7399864fd57dfb27173060bf"}, - {file = "tokenizers-0.15.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae17884aafb3e94f34fb7cfedc29054f5f54e142475ebf8a265a4e388fee3f8b"}, - {file = "tokenizers-0.15.0-cp310-none-win32.whl", hash = "sha256:9a3241acdc9b44cff6e95c4a55b9be943ef3658f8edb3686034d353734adba05"}, - {file = "tokenizers-0.15.0-cp310-none-win_amd64.whl", hash = "sha256:4b31807cb393d6ea31926b307911c89a1209d5e27629aa79553d1599c8ffdefe"}, - {file = "tokenizers-0.15.0-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:af7e9be8c05d30bb137b9fd20f9d99354816599e5fd3d58a4b1e28ba3b36171f"}, - {file = "tokenizers-0.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c3d7343fa562ea29661783344a2d83662db0d3d17a6fa6a403cac8e512d2d9fd"}, - {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:32371008788aeeb0309a9244809a23e4c0259625e6b74a103700f6421373f395"}, - {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9db64c7c9954fbae698884c5bb089764edc549731e5f9b7fa1dd4e4d78d77f"}, - {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dbed5944c31195514669cf6381a0d8d47f164943000d10f93d6d02f0d45c25e0"}, - {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aab16c4a26d351d63e965b0c792f5da7227a37b69a6dc6d922ff70aa595b1b0c"}, - {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c2b60b12fdd310bf85ce5d7d3f823456b9b65eed30f5438dd7761879c495983"}, - {file = "tokenizers-0.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0344d6602740e44054a9e5bbe9775a5e149c4dddaff15959bb07dcce95a5a859"}, - {file = "tokenizers-0.15.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4525f6997d81d9b6d9140088f4f5131f6627e4c960c2c87d0695ae7304233fc3"}, - {file = "tokenizers-0.15.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:65975094fef8cc68919644936764efd2ce98cf1bacbe8db2687155d2b0625bee"}, - {file = "tokenizers-0.15.0-cp311-none-win32.whl", hash = 
"sha256:ff5d2159c5d93015f5a4542aac6c315506df31853123aa39042672031768c301"}, - {file = "tokenizers-0.15.0-cp311-none-win_amd64.whl", hash = "sha256:2dd681b53cf615e60a31a115a3fda3980e543d25ca183797f797a6c3600788a3"}, - {file = "tokenizers-0.15.0-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:c9cce6ee149a3d703f86877bc2a6d997e34874b2d5a2d7839e36b2273f31d3d9"}, - {file = "tokenizers-0.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a0a94bc3370e6f1cc8a07a8ae867ce13b7c1b4291432a773931a61f256d44ea"}, - {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:309cfcccfc7e502cb1f1de2c9c1c94680082a65bfd3a912d5a5b2c90c677eb60"}, - {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8413e994dd7d875ab13009127fc85633916c71213917daf64962bafd488f15dc"}, - {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d0ebf9430f901dbdc3dcb06b493ff24a3644c9f88c08e6a1d6d0ae2228b9b818"}, - {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10361e9c7864b22dd791ec5126327f6c9292fb1d23481d4895780688d5e298ac"}, - {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:babe42635b8a604c594bdc56d205755f73414fce17ba8479d142a963a6c25cbc"}, - {file = "tokenizers-0.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3768829861e964c7a4556f5f23307fce6a23872c2ebf030eb9822dbbbf7e9b2a"}, - {file = "tokenizers-0.15.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9c91588a630adc88065e1c03ac6831e3e2112558869b9ebcb2b8afd8a14c944d"}, - {file = "tokenizers-0.15.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:77606994e793ca54ecf3a3619adc8a906a28ca223d9354b38df41cb8766a0ed6"}, - {file = "tokenizers-0.15.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:6fe143939f3b596681922b2df12a591a5b010e7dcfbee2202482cd0c1c2f2459"}, - {file = "tokenizers-0.15.0-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:b7bee0f1795e3e3561e9a557061b1539e5255b8221e3f928f58100282407e090"}, - {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5d37e7f4439b4c46192ab4f2ff38ab815e4420f153caa13dec9272ef14403d34"}, - {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caadf255cf7f951b38d10097836d1f3bcff4aeaaffadfdf748bab780bf5bff95"}, - {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:05accb9162bf711a941b1460b743d62fec61c160daf25e53c5eea52c74d77814"}, - {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26a2ef890740127cb115ee5260878f4a677e36a12831795fd7e85887c53b430b"}, - {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e54c5f26df14913620046b33e822cb3bcd091a332a55230c0e63cc77135e2169"}, - {file = "tokenizers-0.15.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669b8ed653a578bcff919566631156f5da3aab84c66f3c0b11a6281e8b4731c7"}, - {file = "tokenizers-0.15.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0ea480d943297df26f06f508dab6e012b07f42bf3dffdd36e70799368a5f5229"}, - {file = "tokenizers-0.15.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bc80a0a565ebfc7cd89de7dd581da8c2b3238addfca6280572d27d763f135f2f"}, - {file = "tokenizers-0.15.0-cp37-none-win32.whl", hash = 
"sha256:cdd945e678bbdf4517d5d8de66578a5030aeefecdb46f5320b034de9cad8d4dd"}, - {file = "tokenizers-0.15.0-cp37-none-win_amd64.whl", hash = "sha256:1ab96ab7dc706e002c32b2ea211a94c1c04b4f4de48354728c3a6e22401af322"}, - {file = "tokenizers-0.15.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:f21c9eb71c9a671e2a42f18b456a3d118e50c7f0fc4dd9fa8f4eb727fea529bf"}, - {file = "tokenizers-0.15.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2a5f4543a35889679fc3052086e69e81880b2a5a28ff2a52c5a604be94b77a3f"}, - {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f8aa81afec893e952bd39692b2d9ef60575ed8c86fce1fd876a06d2e73e82dca"}, - {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1574a5a4af22c3def93fe8fe4adcc90a39bf5797ed01686a4c46d1c3bc677d2f"}, - {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c7982fd0ec9e9122d03b209dac48cebfea3de0479335100ef379a9a959b9a5a"}, - {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d16b647032df2ce2c1f9097236e046ea9fedd969b25637b9d5d734d78aa53b"}, - {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b3cdf29e6f9653da330515dc8fa414be5a93aae79e57f8acc50d4028dd843edf"}, - {file = "tokenizers-0.15.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7286f3df10de840867372e3e64b99ef58c677210e3ceb653cd0e740a5c53fe78"}, - {file = "tokenizers-0.15.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aabc83028baa5a36ce7a94e7659250f0309c47fa4a639e5c2c38e6d5ea0de564"}, - {file = "tokenizers-0.15.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:72f78b0e0e276b1fc14a672fa73f3acca034ba8db4e782124a2996734a9ba9cf"}, - {file = "tokenizers-0.15.0-cp38-none-win32.whl", hash = "sha256:9680b0ecc26e7e42f16680c1aa62e924d58d1c2dd992707081cc10a374896ea2"}, - {file = "tokenizers-0.15.0-cp38-none-win_amd64.whl", hash = "sha256:f17cbd88dab695911cbdd385a5a7e3709cc61dff982351f5d1b5939f074a2466"}, - {file = "tokenizers-0.15.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:3661862df7382c5eb23ac4fbf7c75e69b02dc4f5784e4c5a734db406b5b24596"}, - {file = "tokenizers-0.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c3045d191dad49647f5a5039738ecf1c77087945c7a295f7bcf051c37067e883"}, - {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9fcaad9ab0801f14457d7c820d9f246b5ab590c407fc6b073819b1573097aa7"}, - {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79f17027f24fe9485701c8dbb269b9c713954ec3bdc1e7075a66086c0c0cd3c"}, - {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:01a3aa332abc4bee7640563949fcfedca4de8f52691b3b70f2fc6ca71bfc0f4e"}, - {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05b83896a893cdfedad8785250daa3ba9f0504848323471524d4783d7291661e"}, - {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbbf2489fcf25d809731ba2744ff278dd07d9eb3f8b7482726bd6cae607073a4"}, - {file = "tokenizers-0.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab806ad521a5e9de38078b7add97589c313915f6f5fec6b2f9f289d14d607bd6"}, - {file = "tokenizers-0.15.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:4a522612d5c88a41563e3463226af64e2fa00629f65cdcc501d1995dd25d23f5"}, - {file = "tokenizers-0.15.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e58a38c4e6075810bdfb861d9c005236a72a152ebc7005941cc90d1bbf16aca9"}, - {file = "tokenizers-0.15.0-cp39-none-win32.whl", hash = "sha256:b8034f1041fd2bd2b84ff9f4dc4ae2e1c3b71606820a9cd5c562ebd291a396d1"}, - {file = "tokenizers-0.15.0-cp39-none-win_amd64.whl", hash = "sha256:edde9aa964145d528d0e0dbf14f244b8a85ebf276fb76869bc02e2530fa37a96"}, - {file = "tokenizers-0.15.0-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:309445d10d442b7521b98083dc9f0b5df14eca69dbbfebeb98d781ee2cef5d30"}, - {file = "tokenizers-0.15.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d3125a6499226d4d48efc54f7498886b94c418e93a205b673bc59364eecf0804"}, - {file = "tokenizers-0.15.0-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ed56ddf0d54877bb9c6d885177db79b41576e61b5ef6defeb579dcb803c04ad5"}, - {file = "tokenizers-0.15.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b22cd714706cc5b18992a232b023f736e539495f5cc61d2d28d176e55046f6c"}, - {file = "tokenizers-0.15.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac2719b1e9bc8e8e7f6599b99d0a8e24f33d023eb8ef644c0366a596f0aa926"}, - {file = "tokenizers-0.15.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:85ddae17570ec7e5bfaf51ffa78d044f444a8693e1316e1087ee6150596897ee"}, - {file = "tokenizers-0.15.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:76f1bed992e396bf6f83e3df97b64ff47885e45e8365f8983afed8556a0bc51f"}, - {file = "tokenizers-0.15.0-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:3bb0f4df6dce41a1c7482087b60d18c372ef4463cb99aa8195100fcd41e0fd64"}, - {file = "tokenizers-0.15.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:22c27672c27a059a5f39ff4e49feed8c7f2e1525577c8a7e3978bd428eb5869d"}, - {file = "tokenizers-0.15.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78104f5d035c9991f92831fc0efe9e64a05d4032194f2a69f67aaa05a4d75bbb"}, - {file = "tokenizers-0.15.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a40b73dc19d82c3e3ffb40abdaacca8fbc95eeb26c66b7f9f860aebc07a73998"}, - {file = "tokenizers-0.15.0-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d801d1368188c74552cd779b1286e67cb9fd96f4c57a9f9a2a09b6def9e1ab37"}, - {file = "tokenizers-0.15.0-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82641ffb13a4da1293fcc9f437d457647e60ed0385a9216cd135953778b3f0a1"}, - {file = "tokenizers-0.15.0-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:160f9d1810f2c18fffa94aa98bf17632f6bd2dabc67fcb01a698ca80c37d52ee"}, - {file = "tokenizers-0.15.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:8d7d6eea831ed435fdeeb9bcd26476226401d7309d115a710c65da4088841948"}, - {file = "tokenizers-0.15.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f6456bec6c557d63d8ec0023758c32f589e1889ed03c055702e84ce275488bed"}, - {file = "tokenizers-0.15.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eef39a502fad3bf104b9e1906b4fb0cee20e44e755e51df9a98f8922c3bf6d4"}, - {file = "tokenizers-0.15.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1e4664c5b797e093c19b794bbecc19d2367e782b4a577d8b7c1821db5dc150d"}, - {file = 
"tokenizers-0.15.0-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ca003fb5f3995ff5cf676db6681b8ea5d54d3b30bea36af1120e78ee1a4a4cdf"}, - {file = "tokenizers-0.15.0-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7f17363141eb0c53752c89e10650b85ef059a52765d0802ba9613dbd2d21d425"}, - {file = "tokenizers-0.15.0-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:8a765db05581c7d7e1280170f2888cda351760d196cc059c37ea96f121125799"}, - {file = "tokenizers-0.15.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:2a0dd641a72604486cd7302dd8f87a12c8a9b45e1755e47d2682733f097c1af5"}, - {file = "tokenizers-0.15.0-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0a1a3c973e4dc97797fc19e9f11546c95278ffc55c4492acb742f69e035490bc"}, - {file = "tokenizers-0.15.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4fab75642aae4e604e729d6f78e0addb9d7e7d49e28c8f4d16b24da278e5263"}, - {file = "tokenizers-0.15.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65f80be77f6327a86d8fd35a4467adcfe6174c159b4ab52a1a8dd4c6f2d7d9e1"}, - {file = "tokenizers-0.15.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:a8da7533dbe66b88afd430c56a2f2ce1fd82e2681868f857da38eeb3191d7498"}, - {file = "tokenizers-0.15.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa8eb4584fc6cbe6a84d7a7864be3ed28e23e9fd2146aa8ef1814d579df91958"}, - {file = "tokenizers-0.15.0.tar.gz", hash = "sha256:10c7e6e7b4cabd757da59e93f5f8d1126291d16f8b54f28510825ef56a3e5d0e"}, + {file = "tokenizers-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:52f6130c9cbf70544287575a985bf44ae1bda2da7e8c24e97716080593638012"}, + {file = "tokenizers-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:054c1cc9c6d68f7ffa4e810b3d5131e0ba511b6e4be34157aa08ee54c2f8d9ee"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9b9b070fdad06e347563b88c278995735292ded1132f8657084989a4c84a6d5"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea621a7eef4b70e1f7a4e84dd989ae3f0eeb50fc8690254eacc08acb623e82f1"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf7fd9a5141634fa3aa8d6b7be362e6ae1b4cda60da81388fa533e0b552c98fd"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44f2a832cd0825295f7179eaf173381dc45230f9227ec4b44378322d900447c9"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b9ec69247a23747669ec4b0ca10f8e3dfb3545d550258129bd62291aabe8605"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b6a4c78da863ff26dbd5ad9a8ecc33d8a8d97b535172601cf00aee9d7ce9ce"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5ab2a4d21dcf76af60e05af8063138849eb1d6553a0d059f6534357bce8ba364"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a47acfac7e511f6bbfcf2d3fb8c26979c780a91e06fb5b9a43831b2c0153d024"}, + {file = "tokenizers-0.15.2-cp310-none-win32.whl", hash = "sha256:064ff87bb6acdbd693666de9a4b692add41308a2c0ec0770d6385737117215f2"}, + {file = "tokenizers-0.15.2-cp310-none-win_amd64.whl", hash = "sha256:3b919afe4df7eb6ac7cafd2bd14fb507d3f408db7a68c43117f579c984a73843"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_10_12_x86_64.whl", 
hash = "sha256:89cd1cb93e4b12ff39bb2d626ad77e35209de9309a71e4d3d4672667b4b256e7"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cfed5c64e5be23d7ee0f0e98081a25c2a46b0b77ce99a4f0605b1ec43dd481fa"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a907d76dcfda37023ba203ab4ceeb21bc5683436ebefbd895a0841fd52f6f6f2"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ea60479de6fc7b8ae756b4b097572372d7e4032e2521c1bbf3d90c90a99ff0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48e2b9335be2bc0171df9281385c2ed06a15f5cf121c44094338306ab7b33f2c"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:112a1dd436d2cc06e6ffdc0b06d55ac019a35a63afd26475205cb4b1bf0bfbff"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4620cca5c2817177ee8706f860364cc3a8845bc1e291aaf661fb899e5d1c45b0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccd73a82751c523b3fc31ff8194702e4af4db21dc20e55b30ecc2079c5d43cb7"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:107089f135b4ae7817affe6264f8c7a5c5b4fd9a90f9439ed495f54fcea56fb4"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0ff110ecc57b7aa4a594396525a3451ad70988e517237fe91c540997c4e50e29"}, + {file = "tokenizers-0.15.2-cp311-none-win32.whl", hash = "sha256:6d76f00f5c32da36c61f41c58346a4fa7f0a61be02f4301fd30ad59834977cc3"}, + {file = "tokenizers-0.15.2-cp311-none-win_amd64.whl", hash = "sha256:cc90102ed17271cf0a1262babe5939e0134b3890345d11a19c3145184b706055"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f86593c18d2e6248e72fb91c77d413a815153b8ea4e31f7cd443bdf28e467670"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0774bccc6608eca23eb9d620196687c8b2360624619623cf4ba9dc9bd53e8b51"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d0222c5b7c9b26c0b4822a82f6a7011de0a9d3060e1da176f66274b70f846b98"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3835738be1de66624fff2f4f6f6684775da4e9c00bde053be7564cbf3545cc66"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0143e7d9dcd811855c1ce1ab9bf5d96d29bf5e528fd6c7824d0465741e8c10fd"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db35825f6d54215f6b6009a7ff3eedee0848c99a6271c870d2826fbbedf31a38"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f5e64b0389a2be47091d8cc53c87859783b837ea1a06edd9d8e04004df55a5c"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e0480c452217edd35eca56fafe2029fb4d368b7c0475f8dfa3c5c9c400a7456"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a33ab881c8fe70474980577e033d0bc9a27b7ab8272896e500708b212995d834"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a308a607ca9de2c64c1b9ba79ec9a403969715a1b8ba5f998a676826f1a7039d"}, + {file = "tokenizers-0.15.2-cp312-none-win32.whl", hash = 
"sha256:b8fcfa81bcb9447df582c5bc96a031e6df4da2a774b8080d4f02c0c16b42be0b"}, + {file = "tokenizers-0.15.2-cp312-none-win_amd64.whl", hash = "sha256:38d7ab43c6825abfc0b661d95f39c7f8af2449364f01d331f3b51c94dcff7221"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:38bfb0204ff3246ca4d5e726e8cc8403bfc931090151e6eede54d0e0cf162ef0"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c861d35e8286a53e06e9e28d030b5a05bcbf5ac9d7229e561e53c352a85b1fc"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:936bf3842db5b2048eaa53dade907b1160f318e7c90c74bfab86f1e47720bdd6"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:620beacc3373277700d0e27718aa8b25f7b383eb8001fba94ee00aeea1459d89"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2735ecbbf37e52db4ea970e539fd2d450d213517b77745114f92867f3fc246eb"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:473c83c5e2359bb81b0b6fde870b41b2764fcdd36d997485e07e72cc3a62264a"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968fa1fb3c27398b28a4eca1cbd1e19355c4d3a6007f7398d48826bbe3a0f728"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:865c60ae6eaebdde7da66191ee9b7db52e542ed8ee9d2c653b6d190a9351b980"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7c0d8b52664ab2d4a8d6686eb5effc68b78608a9008f086a122a7b2996befbab"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f33dfbdec3784093a9aebb3680d1f91336c56d86cc70ddf88708251da1fe9064"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d44ba80988ff9424e33e0a49445072ac7029d8c0e1601ad25a0ca5f41ed0c1d6"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:dce74266919b892f82b1b86025a613956ea0ea62a4843d4c4237be2c5498ed3a"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0ef06b9707baeb98b316577acb04f4852239d856b93e9ec3a299622f6084e4be"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73e2e74bbb07910da0d37c326869f34113137b23eadad3fc00856e6b3d9930c"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eeb12daf02a59e29f578a865f55d87cd103ce62bd8a3a5874f8fdeaa82e336b"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ba9f6895af58487ca4f54e8a664a322f16c26bbb442effd01087eba391a719e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccec77aa7150e38eec6878a493bf8c263ff1fa8a62404e16c6203c64c1f16a26"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f40604f5042ff210ba82743dda2b6aa3e55aa12df4e9f2378ee01a17e2855e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5645938a42d78c4885086767c70923abad047163d809c16da75d6b290cb30bbe"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:05a77cbfebe28a61ab5c3891f9939cc24798b63fa236d84e5f29f3a85a200c00"}, + {file = "tokenizers-0.15.2-cp37-none-win32.whl", hash = 
"sha256:361abdc068e8afe9c5b818769a48624687fb6aaed49636ee39bec4e95e1a215b"}, + {file = "tokenizers-0.15.2-cp37-none-win_amd64.whl", hash = "sha256:7ef789f83eb0f9baeb4d09a86cd639c0a5518528f9992f38b28e819df397eb06"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4fe1f74a902bee74a3b25aff180fbfbf4f8b444ab37c4d496af7afd13a784ed2"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c4b89038a684f40a6b15d6b09f49650ac64d951ad0f2a3ea9169687bbf2a8ba"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d05a1b06f986d41aed5f2de464c003004b2df8aaf66f2b7628254bcbfb72a438"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508711a108684111ec8af89d3a9e9e08755247eda27d0ba5e3c50e9da1600f6d"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daa348f02d15160cb35439098ac96e3a53bacf35885072611cd9e5be7d333daa"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:494fdbe5932d3416de2a85fc2470b797e6f3226c12845cadf054dd906afd0442"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2d60f5246f4da9373f75ff18d64c69cbf60c3bca597290cea01059c336d2470"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93268e788825f52de4c7bdcb6ebc1fcd4a5442c02e730faa9b6b08f23ead0e24"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6fc7083ab404019fc9acafe78662c192673c1e696bd598d16dc005bd663a5cf9"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e39b41e5531d6b2122a77532dbea60e171ef87a3820b5a3888daa847df4153"}, + {file = "tokenizers-0.15.2-cp38-none-win32.whl", hash = "sha256:06cd0487b1cbfabefb2cc52fbd6b1f8d4c37799bd6c6e1641281adaa6b2504a7"}, + {file = "tokenizers-0.15.2-cp38-none-win_amd64.whl", hash = "sha256:5179c271aa5de9c71712e31cb5a79e436ecd0d7532a408fa42a8dbfa4bc23fd9"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82f8652a74cc107052328b87ea8b34291c0f55b96d8fb261b3880216a9f9e48e"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:02458bee6f5f3139f1ebbb6d042b283af712c0981f5bc50edf771d6b762d5e4f"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c9a09cd26cca2e1c349f91aa665309ddb48d71636370749414fbf67bc83c5343"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:158be8ea8554e5ed69acc1ce3fbb23a06060bd4bbb09029431ad6b9a466a7121"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ddba9a2b0c8c81633eca0bb2e1aa5b3a15362b1277f1ae64176d0f6eba78ab1"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ef5dd1d39797044642dbe53eb2bc56435308432e9c7907728da74c69ee2adca"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:454c203164e07a860dbeb3b1f4a733be52b0edbb4dd2e5bd75023ffa8b49403a"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cf6b7f1d4dc59af960e6ffdc4faffe6460bbfa8dce27a58bf75755ffdb2526d"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:2ef09bbc16519f6c25d0c7fc0c6a33a6f62923e263c9d7cca4e58b8c61572afb"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c9a2ebdd2ad4ec7a68e7615086e633857c85e2f18025bd05d2a4399e6c5f7169"}, + {file = "tokenizers-0.15.2-cp39-none-win32.whl", hash = "sha256:918fbb0eab96fe08e72a8c2b5461e9cce95585d82a58688e7f01c2bd546c79d0"}, + {file = "tokenizers-0.15.2-cp39-none-win_amd64.whl", hash = "sha256:524e60da0135e106b254bd71f0659be9f89d83f006ea9093ce4d1fab498c6d0d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9b648a58281c4672212fab04e60648fde574877d0139cd4b4f93fe28ca8944"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7c7d18b733be6bbca8a55084027f7be428c947ddf871c500ee603e375013ffba"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:13ca3611de8d9ddfbc4dc39ef54ab1d2d4aaa114ac8727dfdc6a6ec4be017378"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:237d1bf3361cf2e6463e6c140628e6406766e8b27274f5fcc62c747ae3c6f094"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67a0fe1e49e60c664915e9fb6b0cb19bac082ab1f309188230e4b2920230edb3"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4e022fe65e99230b8fd89ebdfea138c24421f91c1a4f4781a8f5016fd5cdfb4d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d857be2df69763362ac699f8b251a8cd3fac9d21893de129bc788f8baaef2693"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:708bb3e4283177236309e698da5fcd0879ce8fd37457d7c266d16b550bcbbd18"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c35e09e9899b72a76e762f9854e8750213f67567787d45f37ce06daf57ca78"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1257f4394be0d3b00de8c9e840ca5601d0a4a8438361ce9c2b05c7d25f6057b"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02272fe48280e0293a04245ca5d919b2c94a48b408b55e858feae9618138aeda"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dc3ad9ebc76eabe8b1d7c04d38be884b8f9d60c0cdc09b0aa4e3bcf746de0388"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:32e16bdeffa7c4f46bf2152172ca511808b952701d13e7c18833c0b73cb5c23f"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fb16ba563d59003028b678d2361a27f7e4ae0ab29c7a80690efa20d829c81fdb"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2277c36d2d6cdb7876c274547921a42425b6810d38354327dd65a8009acf870c"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1cf75d32e8d250781940d07f7eece253f2fe9ecdb1dc7ba6e3833fa17b82fcbc"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b3b31884dc8e9b21508bb76da80ebf7308fdb947a17affce815665d5c4d028"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10122d8d8e30afb43bb1fe21a3619f62c3e2574bff2699cf8af8b0b6c5dc4a3"}, + {file = 
"tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d88b96ff0fe8e91f6ef01ba50b0d71db5017fa4e3b1d99681cec89a85faf7bf7"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:37aaec5a52e959892870a7c47cef80c53797c0db9149d458460f4f31e2fb250e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e2ea752f2b0fe96eb6e2f3adbbf4d72aaa1272079b0dfa1145507bd6a5d537e6"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b19a808d8799fda23504a5cd31d2f58e6f52f140380082b352f877017d6342b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c86e5e068ac8b19204419ed8ca90f9d25db20578f5881e337d203b314f4104"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de19c4dc503c612847edf833c82e9f73cd79926a384af9d801dcf93f110cea4e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea09acd2fe3324174063d61ad620dec3bcf042b495515f27f638270a7d466e8b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cf27fd43472e07b57cf420eee1e814549203d56de00b5af8659cb99885472f1f"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7ca22bd897537a0080521445d91a58886c8c04084a6a19e6c78c586e0cfa92a5"}, + {file = "tokenizers-0.15.2.tar.gz", hash = "sha256:e6e9c6e019dd5484be5beafc775ae6c925f4c69a3487040ed09b45e13df2cb91"}, ] [package.dependencies] @@ -1044,13 +1044,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.1" +version = "4.66.2" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, - {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, ] [package.dependencies] @@ -1075,54 +1075,57 @@ files = [ [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file 
= "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] @@ -1131,4 +1134,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "a4fbe078fe6ee7b013e0f73b3b139869db33a686fe6c2abeec57f901aaebbbd2" +content-hash = "c4d03a1586b121b905ea4c0f86d04427cbb3e155e60d67c4b3351186de0d540a" diff --git a/libs/partners/anthropic/pyproject.toml b/libs/partners/anthropic/pyproject.toml index 749a3b6d27aff..2cdfaba1989d1 100644 --- a/libs/partners/anthropic/pyproject.toml +++ b/libs/partners/anthropic/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain-anthropic" -version = "0.0.1.post2" +version = "0.0.2" description = "An integration package connecting AnthropicMessages and LangChain" authors = [] readme = "README.md" @@ -12,8 +12,8 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -langchain-core = ">=0.0.12" -anthropic = "^0.8.0" +langchain-core = "^0.1" +anthropic = ">=0.16.0,<1" [tool.poetry.group.test] optional = true @@ -21,11 +21,11 @@ optional = true [tool.poetry.group.test.dependencies] pytest = "^7.3.0" freezegun = "^1.2.2" -pytest-mock = "^3.10.0" +pytest-mock = "^3.10.0" syrupy = "^4.0.2" pytest-watcher = "^0.3.4" pytest-asyncio = "^0.21.1" -langchain-core = {path = "../../core", develop = true} +langchain-core = { path = "../../core", develop = true } [tool.poetry.group.codespell] optional = true @@ -41,25 +41,25 @@ ruff = "^0.1.5" [tool.poetry.group.typing.dependencies] mypy = "^0.991" -langchain-core = {path = "../../core", develop = true} +langchain-core = { path = "../../core", develop = true } [tool.poetry.group.dev] optional = true 
[tool.poetry.group.dev.dependencies] -langchain-core = {path = "../../core", develop = true} +langchain-core = { path = "../../core", develop = true } [tool.poetry.group.test_integration] optional = true [tool.poetry.group.test_integration.dependencies] -langchain-core = {path = "../../core", develop = true} +langchain-core = { path = "../../core", develop = true } [tool.ruff.lint] select = [ - "E", # pycodestyle - "F", # pyflakes - "I", # isort + "E", # pycodestyle + "F", # pyflakes + "I", # isort "T201", # print ] @@ -67,9 +67,7 @@ select = [ disallow_untyped_defs = "True" [tool.coverage.run] -omit = [ - "tests/*", -] +omit = ["tests/*"] [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/libs/partners/astradb/.gitignore b/libs/partners/astradb/.gitignore new file mode 100644 index 0000000000000..bdc93231f0353 --- /dev/null +++ b/libs/partners/astradb/.gitignore @@ -0,0 +1,5 @@ +__pycache__ +*.env +.mypy_cache +.ruff_cache +.pytest_cache \ No newline at end of file diff --git a/libs/partners/astradb/LICENSE b/libs/partners/astradb/LICENSE new file mode 100644 index 0000000000000..426b65090341f --- /dev/null +++ b/libs/partners/astradb/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 LangChain, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/libs/partners/astradb/Makefile b/libs/partners/astradb/Makefile new file mode 100644 index 0000000000000..ee1200c9af5e4 --- /dev/null +++ b/libs/partners/astradb/Makefile @@ -0,0 +1,66 @@ +SHELL := /bin/bash +.PHONY: all format lint test tests integration_test integration_tests spell_check help + +# Default target executed when no arguments are given to make. +all: help + +# Define a variable for the test file path. +TEST_FILE ?= tests/unit_tests/ +INTEGRATION_TEST_FILE ?= tests/integration_tests/ + +test: + poetry run pytest $(TEST_FILE) + +tests: + poetry run pytest $(TEST_FILE) + +integration_test: + poetry run pytest $(INTEGRATION_TEST_FILE) + +integration_tests: + poetry run pytest $(INTEGRATION_TEST_FILE) + +###################### +# LINTING AND FORMATTING +###################### + +# Define a variable for Python and notebook files. +PYTHON_FILES=. +MYPY_CACHE=.mypy_cache +lint format: PYTHON_FILES=. +lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/partners/astradb --name-only --diff-filter=d master | grep -E '\.py$$|\.ipynb$$') +lint_package: PYTHON_FILES=langchain_astradb +lint_tests: PYTHON_FILES=tests +lint_tests: MYPY_CACHE=.mypy_cache_test + +lint lint_diff lint_package lint_tests: + poetry run ruff . 
+ poetry run ruff format $(PYTHON_FILES) --diff + poetry run ruff --select I $(PYTHON_FILES) + mkdir -p $(MYPY_CACHE); poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) + +format format_diff: + poetry run ruff format $(PYTHON_FILES) + poetry run ruff --select I --fix $(PYTHON_FILES) + +spell_check: + poetry run codespell --toml pyproject.toml + +spell_fix: + poetry run codespell --toml pyproject.toml -w + +check_imports: $(shell find langchain_astradb -name '*.py') + poetry run python ./scripts/check_imports.py $^ + +###################### +# HELP +###################### + +help: + @echo '----' + @echo 'check_imports - check imports' + @echo 'format - run code formatters' + @echo 'lint - run linters' + @echo 'test - run unit tests' + @echo 'tests - run unit tests' + @echo 'test TEST_FILE= - run all tests in file' diff --git a/libs/partners/astradb/README.md b/libs/partners/astradb/README.md new file mode 100644 index 0000000000000..a4c2dc84e1fb5 --- /dev/null +++ b/libs/partners/astradb/README.md @@ -0,0 +1,35 @@ +# langchain-astradb + +This package contains the LangChain integrations for using DataStax Astra DB. + +> DataStax [Astra DB](https://docs.datastax.com/en/astra/home/astra.html) is a serverless vector-capable database built on Apache Cassandraยฎ and made conveniently available +> through an easy-to-use JSON API. + +_**Note.** For a short transitional period, only some of the Astra DB integration classes are contained in this package (the remaining ones being still in `langchain-community`). In a short while, and surely by version 0.2 of LangChain, all of the Astra DB support will be removed from `langchain-community` and included in this package._ + +## Installation and Setup + +Installation of this partner package: + +```bash +pip install langchain-astradb +``` + +## Integrations overview + +### Vector Store + +```python +from langchain_astradb.vectorstores import AstraDBVectorStore + +my_store = AstraDBVectorStore( + embedding=my_embeddings, + collection_name="my_store", + api_endpoint="https://...", + token="AstraCS:...", +) +``` + +## Reference + +See the [LangChain docs page](https://python.langchain.com/docs/integrations/providers/astradb) for a more detailed listing. diff --git a/libs/partners/astradb/langchain_astradb/__init__.py b/libs/partners/astradb/langchain_astradb/__init__.py new file mode 100644 index 0000000000000..fc86dd73bcf46 --- /dev/null +++ b/libs/partners/astradb/langchain_astradb/__init__.py @@ -0,0 +1,5 @@ +from langchain_astradb.vectorstores import AstraDBVectorStore + +__all__ = [ + "AstraDBVectorStore", +] diff --git a/libs/partners/astradb/langchain_astradb/py.typed b/libs/partners/astradb/langchain_astradb/py.typed new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/astradb/langchain_astradb/utils/mmr.py b/libs/partners/astradb/langchain_astradb/utils/mmr.py new file mode 100644 index 0000000000000..feb34ad1c23d6 --- /dev/null +++ b/libs/partners/astradb/langchain_astradb/utils/mmr.py @@ -0,0 +1,87 @@ +""" +Tools for the Maximal Marginal Relevance (MMR) reranking. +Duplicated from langchain_community to avoid cross-dependencies. 
+ +Functions "maximal_marginal_relevance" and "cosine_similarity" +are duplicated in this utility respectively from modules: + - "libs/community/langchain_community/vectorstores/utils.py" + - "libs/community/langchain_community/utils/math.py" +""" + +import logging +from typing import List, Union + +import numpy as np + +logger = logging.getLogger(__name__) + +Matrix = Union[List[List[float]], List[np.ndarray], np.ndarray] + + +def cosine_similarity(X: Matrix, Y: Matrix) -> np.ndarray: + """Row-wise cosine similarity between two equal-width matrices.""" + if len(X) == 0 or len(Y) == 0: + return np.array([]) + + X = np.array(X) + Y = np.array(Y) + if X.shape[1] != Y.shape[1]: + raise ValueError( + f"Number of columns in X and Y must be the same. X has shape {X.shape} " + f"and Y has shape {Y.shape}." + ) + try: + import simsimd as simd # type: ignore + + X = np.array(X, dtype=np.float32) + Y = np.array(Y, dtype=np.float32) + Z = 1 - simd.cdist(X, Y, metric="cosine") + if isinstance(Z, float): + return np.array([Z]) + return Z + except ImportError: + logger.info( + "Unable to import simsimd, defaulting to NumPy implementation. If you want " + "to use simsimd please install with `pip install simsimd`." + ) + X_norm = np.linalg.norm(X, axis=1) + Y_norm = np.linalg.norm(Y, axis=1) + # Ignore divide by zero errors run time warnings as those are handled below. + with np.errstate(divide="ignore", invalid="ignore"): + similarity = np.dot(X, Y.T) / np.outer(X_norm, Y_norm) + similarity[np.isnan(similarity) | np.isinf(similarity)] = 0.0 + return similarity + + +def maximal_marginal_relevance( + query_embedding: np.ndarray, + embedding_list: list, + lambda_mult: float = 0.5, + k: int = 4, +) -> List[int]: + """Calculate maximal marginal relevance.""" + if min(k, len(embedding_list)) <= 0: + return [] + if query_embedding.ndim == 1: + query_embedding = np.expand_dims(query_embedding, axis=0) + similarity_to_query = cosine_similarity(query_embedding, embedding_list)[0] + most_similar = int(np.argmax(similarity_to_query)) + idxs = [most_similar] + selected = np.array([embedding_list[most_similar]]) + while len(idxs) < min(k, len(embedding_list)): + best_score = -np.inf + idx_to_add = -1 + similarity_to_selected = cosine_similarity(embedding_list, selected) + for i, query_score in enumerate(similarity_to_query): + if i in idxs: + continue + redundant_score = max(similarity_to_selected[i]) + equation_score = ( + lambda_mult * query_score - (1 - lambda_mult) * redundant_score + ) + if equation_score > best_score: + best_score = equation_score + idx_to_add = i + idxs.append(idx_to_add) + selected = np.append(selected, [embedding_list[idx_to_add]], axis=0) + return idxs diff --git a/libs/partners/astradb/langchain_astradb/vectorstores/__init__.py b/libs/partners/astradb/langchain_astradb/vectorstores/__init__.py new file mode 100644 index 0000000000000..310732d125ff9 --- /dev/null +++ b/libs/partners/astradb/langchain_astradb/vectorstores/__init__.py @@ -0,0 +1,5 @@ +from langchain_astradb.vectorstores.astradb import AstraDBVectorStore + +__all__ = [ + "AstraDBVectorStore", +] diff --git a/libs/partners/astradb/langchain_astradb/vectorstores/astradb.py b/libs/partners/astradb/langchain_astradb/vectorstores/astradb.py new file mode 100644 index 0000000000000..e501113e3083a --- /dev/null +++ b/libs/partners/astradb/langchain_astradb/vectorstores/astradb.py @@ -0,0 +1,1317 @@ +from __future__ import annotations + +import asyncio +import uuid +import warnings +from asyncio import Task +from concurrent.futures 
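To make the duplicated MMR helper above concrete, here is a minimal, self-contained sketch of calling `maximal_marginal_relevance` directly on toy 2-D vectors; the vectors and the `lambda_mult` value are made up for illustration, while real inputs would be embedding vectors produced by the store's embedding function.

```python
import numpy as np

from langchain_astradb.utils.mmr import maximal_marginal_relevance

# Toy 2-D embeddings: indices 0 and 1 are near-duplicates, index 2 is orthogonal.
query = np.array([1.0, 0.0])
candidates = [[0.99, 0.10], [0.98, 0.12], [0.0, 1.0]]

# A low lambda_mult weights diversity more heavily, so after the best match
# (index 0) the orthogonal vector is preferred over the second near-duplicate.
picked = maximal_marginal_relevance(query, candidates, lambda_mult=0.3, k=2)
print(picked)  # [0, 2]
```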
import ThreadPoolExecutor +from typing import ( + Any, + Callable, + Dict, + Iterable, + List, + Optional, + Set, + Tuple, + Type, + TypeVar, + cast, +) + +import numpy as np +from astrapy.db import ( + AstraDB as AstraDBClient, +) +from astrapy.db import ( + AstraDBCollection, + AsyncAstraDBCollection, +) +from astrapy.db import ( + AsyncAstraDB as AsyncAstraDBClient, +) +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings +from langchain_core.runnables import run_in_executor +from langchain_core.runnables.utils import gather_with_concurrency +from langchain_core.utils.iter import batch_iterate +from langchain_core.vectorstores import VectorStore + +from langchain_astradb.utils.mmr import maximal_marginal_relevance + +T = TypeVar("T") +U = TypeVar("U") +DocDict = Dict[str, Any] # dicts expressing entries to insert + +# Batch/concurrency default values (if parameters not provided): +# Size of batches for bulk insertions: +# (20 is the max batch size for the HTTP API at the time of writing) +DEFAULT_BATCH_SIZE = 20 +# Number of threads to insert batches concurrently: +DEFAULT_BULK_INSERT_BATCH_CONCURRENCY = 16 +# Number of threads in a batch to insert pre-existing entries: +DEFAULT_BULK_INSERT_OVERWRITE_CONCURRENCY = 10 +# Number of threads (for deleting multiple rows concurrently): +DEFAULT_BULK_DELETE_CONCURRENCY = 20 + + +def _unique_list(lst: List[T], key: Callable[[T], U]) -> List[T]: + visited_keys: Set[U] = set() + new_lst = [] + for item in lst: + item_key = key(item) + if item_key not in visited_keys: + visited_keys.add(item_key) + new_lst.append(item) + return new_lst + + +class AstraDBVectorStore(VectorStore): + """Wrapper around DataStax Astra DB for vector-store workloads. + + For quickstart and details, visit: + docs.datastax.com/en/astra/home/astra.html + + Example: + .. code-block:: python + + from langchain_astradb.vectorstores import AstraDBVectorStore + from langchain_openai.embeddings import OpenAIEmbeddings + + embeddings = OpenAIEmbeddings() + vectorstore = AstraDBVectorStore( + embedding=embeddings, + collection_name="my_store", + token="AstraCS:...", + api_endpoint="https://-.apps.astra.datastax.com" + ) + + vectorstore.add_texts(["Giraffes", "All good here"]) + results = vectorstore.similarity_search("Everything's ok", k=1) + + Constructor Args (only keyword-arguments accepted): + embedding (Embeddings): embedding function to use. + collection_name (str): name of the Astra DB collection to create/use. + token (Optional[str]): API token for Astra DB usage. + api_endpoint (Optional[str]): full URL to the API endpoint, + such as "https://-us-east1.apps.astra.datastax.com". + astra_db_client (Optional[astrapy.db.AstraDB]): + *alternative to token+api_endpoint*, + you can pass an already-created 'astrapy.db.AstraDB' instance. + async_astra_db_client (Optional[astrapy.db.AsyncAstraDB]): + same as `astra_db_client`, but the basis for the async API + of the vector store. + namespace (Optional[str]): namespace (aka keyspace) where the + collection is created. Defaults to the database's "default namespace". + metric (Optional[str]): similarity function to use out of those + available in Astra DB. If left out, it will use Astra DB API's + defaults (i.e. "cosine" - but, for performance reasons, + "dot_product" is suggested if embeddings are normalized to one). + + Advanced arguments (coming with sensible defaults): + batch_size (Optional[int]): Size of batches for bulk insertions. 
+ bulk_insert_batch_concurrency (Optional[int]): Number of threads + to insert batches concurrently. + bulk_insert_overwrite_concurrency (Optional[int]): Number of + threads in a batch to insert pre-existing entries. + bulk_delete_concurrency (Optional[int]): Number of threads + (for deleting multiple rows concurrently). + pre_delete_collection (Optional[bool]): whether to delete the collection + before creating it. If False and the collection already exists, + the collection will be used as is. + + A note on concurrency: as a rule of thumb, on a typical client machine + it is suggested to keep the quantity + bulk_insert_batch_concurrency * bulk_insert_overwrite_concurrency + much below 1000 to avoid exhausting the client multithreading/networking + resources. The hardcoded defaults are somewhat conservative to meet + most machines' specs, but a sensible choice to test may be: + bulk_insert_batch_concurrency = 80 + bulk_insert_overwrite_concurrency = 10 + A bit of experimentation is required to nail the best results here, + depending on both the machine/network specs and the expected workload + (specifically, how often a write is an update of an existing id). + Remember you can pass concurrency settings to individual calls to + add_texts and add_documents as well. + + A note on passing astra_db_client and/or async_astra_db_client instead + of the credentials (token, api_endpoint): + - if you pass only the async client when creating the store, + the sync methods will error when called. + - conversely, if you pass only the sync client, the async methods will + still be available, but will be wrapping its sync counterpart + in a `run_in_executor` construct instead of using the native async. + """ + + @staticmethod + def _filter_to_metadata(filter_dict: Optional[Dict[str, Any]]) -> Dict[str, Any]: + if filter_dict is None: + return {} + else: + metadata_filter = {} + for k, v in filter_dict.items(): + if k and k[0] == "$": + if isinstance(v, list): + metadata_filter[k] = [ + AstraDBVectorStore._filter_to_metadata(f) for f in v + ] + else: + # assume each list item can be fed back to this function + metadata_filter[k] = AstraDBVectorStore._filter_to_metadata(v) # type: ignore[assignment] + else: + metadata_filter[f"metadata.{k}"] = v + + return metadata_filter + + def __init__( + self, + *, + embedding: Embeddings, + collection_name: str, + token: Optional[str] = None, + api_endpoint: Optional[str] = None, + astra_db_client: Optional[AstraDBClient] = None, + async_astra_db_client: Optional[AsyncAstraDBClient] = None, + namespace: Optional[str] = None, + metric: Optional[str] = None, + batch_size: Optional[int] = None, + bulk_insert_batch_concurrency: Optional[int] = None, + bulk_insert_overwrite_concurrency: Optional[int] = None, + bulk_delete_concurrency: Optional[int] = None, + pre_delete_collection: bool = False, + ) -> None: + """ + Create an AstraDBVectorStore vector store object. See class docstring for help. + """ + + # Conflicting-arg checks: + if astra_db_client is not None or async_astra_db_client is not None: + if token is not None or api_endpoint is not None: + raise ValueError( + "You cannot pass 'astra_db_client' or 'async_astra_db_client' to " + "AstraDBVectorStore if passing 'token' and 'api_endpoint'." 
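As a side note on the `_filter_to_metadata` helper defined above: it is an internal static method, but a quick sketch (with made-up filter values) shows how user-facing metadata filters are rewritten into the `metadata.`-prefixed fields sent to the Astra DB JSON API, with `$`-prefixed operators kept and their operands rewritten recursively.

```python
from langchain_astradb.vectorstores import AstraDBVectorStore

# Plain keys are namespaced under "metadata."...
print(AstraDBVectorStore._filter_to_metadata({"genre": "sci-fi", "year": 1984}))
# {'metadata.genre': 'sci-fi', 'metadata.year': 1984}

# ...while operator keys ($and, $or, ...) are preserved and recursed into.
print(
    AstraDBVectorStore._filter_to_metadata(
        {"$and": [{"genre": "sci-fi"}, {"year": 1984}]}
    )
)
# {'$and': [{'metadata.genre': 'sci-fi'}, {'metadata.year': 1984}]}
```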
+ ) + + self.embedding = embedding + self.collection_name = collection_name + self.token = token + self.api_endpoint = api_endpoint + self.namespace = namespace + # Concurrency settings + self.batch_size: int = batch_size or DEFAULT_BATCH_SIZE + self.bulk_insert_batch_concurrency: int = ( + bulk_insert_batch_concurrency or DEFAULT_BULK_INSERT_BATCH_CONCURRENCY + ) + self.bulk_insert_overwrite_concurrency: int = ( + bulk_insert_overwrite_concurrency + or DEFAULT_BULK_INSERT_OVERWRITE_CONCURRENCY + ) + self.bulk_delete_concurrency: int = ( + bulk_delete_concurrency or DEFAULT_BULK_DELETE_CONCURRENCY + ) + # "vector-related" settings + self._embedding_dimension: Optional[int] = None + self.metric = metric + + self.astra_db = astra_db_client + self.async_astra_db = async_astra_db_client + self.collection = None + self.async_collection = None + + if token and api_endpoint: + self.astra_db = AstraDBClient( + token=cast(str, self.token), + api_endpoint=cast(str, self.api_endpoint), + namespace=self.namespace, + ) + self.async_astra_db = AsyncAstraDBClient( + token=cast(str, self.token), + api_endpoint=cast(str, self.api_endpoint), + namespace=self.namespace, + ) + + if self.astra_db is not None: + self.collection = AstraDBCollection( + collection_name=self.collection_name, + astra_db=self.astra_db, + ) + + self.async_setup_db_task: Optional[Task] = None + if self.async_astra_db is not None: + self.async_collection = AsyncAstraDBCollection( + collection_name=self.collection_name, + astra_db=self.async_astra_db, + ) + try: + asyncio.get_running_loop() + self.async_setup_db_task = asyncio.create_task( + self._setup_db(pre_delete_collection) + ) + except RuntimeError: + pass + + if self.async_setup_db_task is None: + if not pre_delete_collection: + self._provision_collection() + else: + self.clear() + + def _ensure_astra_db_client(self) -> None: + """ + If no error is raised, that means self.collection + is also not None (as per constructor flow). + """ + if not self.astra_db: + raise ValueError("Missing AstraDB client") + + async def _setup_db(self, pre_delete_collection: bool) -> None: + if pre_delete_collection: + # _setup_db is called from the constructor only, from a place + # where async_astra_db is not None for sure + await self.async_astra_db.delete_collection( # type: ignore[union-attr] + collection_name=self.collection_name, + ) + await self._aprovision_collection() + + async def _ensure_db_setup(self) -> None: + if self.async_setup_db_task: + await self.async_setup_db_task + + def _get_embedding_dimension(self) -> int: + if self._embedding_dimension is None: + self._embedding_dimension = len( + self.embedding.embed_query("This is a sample sentence.") + ) + return self._embedding_dimension + + def _provision_collection(self) -> None: + """ + Run the API invocation to create the collection on the backend. + + Internal-usage method, no object members are set, + other than working on the underlying actual storage. + """ + self._ensure_astra_db_client() + # self.astra_db is not None (by _ensure_astra_db_client) + self.astra_db.create_collection( # type: ignore[union-attr] + dimension=self._get_embedding_dimension(), + collection_name=self.collection_name, + metric=self.metric, + ) + + async def _aprovision_collection(self) -> None: + """ + Run the API invocation to create the collection on the backend. + + Internal-usage method, no object members are set, + other than working on the underlying actual storage. 
+ """ + if not self.async_astra_db: + await run_in_executor(None, self._provision_collection) + else: + await self.async_astra_db.create_collection( + dimension=self._get_embedding_dimension(), + collection_name=self.collection_name, + metric=self.metric, + ) + + @property + def embeddings(self) -> Embeddings: + return self.embedding + + @staticmethod + def _dont_flip_the_cos_score(similarity0to1: float) -> float: + """Keep similarity from client unchanged ad it's in [0:1] already.""" + return similarity0to1 + + def _select_relevance_score_fn(self) -> Callable[[float], float]: + """ + The underlying API calls already returns a "score proper", + i.e. one in [0, 1] where higher means more *similar*, + so here the final score transformation is not reversing the interval: + """ + return self._dont_flip_the_cos_score + + def clear(self) -> None: + """Empty the collection of all its stored entries.""" + self._ensure_astra_db_client() + # self.collection is not None (by _ensure_astra_db_client) + self.collection.delete_many(filter={}) # type: ignore[union-attr] + + async def aclear(self) -> None: + """Empty the collection of all its stored entries.""" + await self._ensure_db_setup() + if not self.async_astra_db: + return await run_in_executor(None, self.clear) + else: + # async_collection not None if so is async_astra_db (constr. flow) + await self.async_collection.delete_many({}) # type: ignore[union-attr] + + def delete_by_document_id(self, document_id: str) -> bool: + """ + Remove a single document from the store, given its document_id (str). + Return True if a document has indeed been deleted, False if ID not found. + """ + self._ensure_astra_db_client() + # self.collection is not None (by _ensure_astra_db_client) + deletion_response = self.collection.delete_one(document_id) # type: ignore[union-attr] + return ((deletion_response or {}).get("status") or {}).get( + "deletedCount", 0 + ) == 1 + + async def adelete_by_document_id(self, document_id: str) -> bool: + """ + Remove a single document from the store, given its document_id (str). + Return True if a document has indeed been deleted, False if ID not found. + """ + await self._ensure_db_setup() + if not self.async_collection: + return await run_in_executor(None, self.delete_by_document_id, document_id) + deletion_response = await self.async_collection.delete_one(document_id) + return ((deletion_response or {}).get("status") or {}).get( + "deletedCount", 0 + ) == 1 + + def delete( + self, + ids: Optional[List[str]] = None, + concurrency: Optional[int] = None, + **kwargs: Any, + ) -> Optional[bool]: + """Delete by vector ids. + + Args: + ids (Optional[List[str]]): List of ids to delete. + concurrency (Optional[int]): max number of threads issuing + single-doc delete requests. Defaults to instance-level setting. + + Returns: + Optional[bool]: True if deletion is successful, + False otherwise, None if not implemented. + """ + + if kwargs: + warnings.warn( + "Method 'delete' of AstraDBVectorStore vector store invoked with " + f"unsupported arguments ({', '.join(sorted(kwargs.keys()))}), " + "which will be ignored." 
+ ) + + if ids is None: + raise ValueError("No ids provided to delete.") + + _max_workers = concurrency or self.bulk_delete_concurrency + with ThreadPoolExecutor(max_workers=_max_workers) as tpe: + _ = list( + tpe.map( + self.delete_by_document_id, + ids, + ) + ) + return True + + async def adelete( + self, + ids: Optional[List[str]] = None, + concurrency: Optional[int] = None, + **kwargs: Any, + ) -> Optional[bool]: + """Delete by vector ID or other criteria. + + Args: + ids: List of ids to delete. + concurrency (Optional[int]): max number of concurrent delete queries. + Defaults to instance-level setting. + **kwargs: Other keyword arguments that subclasses might use. + + Returns: + Optional[bool]: True if deletion is successful, + False otherwise, None if not implemented. + """ + if kwargs: + warnings.warn( + "Method 'adelete' of AstraDBVectorStore invoked with " + f"unsupported arguments ({', '.join(sorted(kwargs.keys()))}), " + "which will be ignored." + ) + + if ids is None: + raise ValueError("No ids provided to delete.") + + return all( + await gather_with_concurrency( + concurrency, *[self.adelete_by_document_id(doc_id) for doc_id in ids] + ) + ) + + def delete_collection(self) -> None: + """ + Completely delete the collection from the database (as opposed + to 'clear()', which empties it only). + Stored data is lost and unrecoverable, resources are freed. + Use with caution. + """ + self._ensure_astra_db_client() + # self.astra_db is not None (by _ensure_astra_db_client) + self.astra_db.delete_collection( # type: ignore[union-attr] + collection_name=self.collection_name, + ) + + async def adelete_collection(self) -> None: + """ + Completely delete the collection from the database (as opposed + to 'clear()', which empties it only). + Stored data is lost and unrecoverable, resources are freed. + Use with caution. 
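For orientation, a hedged usage sketch of the deletion entry points defined above; `store` is assumed to be an already-constructed `AstraDBVectorStore` (credentials and collection name as in the README example), and the texts and ids are invented.

```python
# Insert a few entries so there is something to delete.
inserted_ids = store.add_texts(["alpha", "beta", "gamma"])

# Delete by id; `concurrency` overrides the instance-level thread count
# used for the per-document delete calls.
store.delete(ids=inserted_ids[:2], concurrency=5)
# Async flavor (single-doc deletes gathered with bounded concurrency):
# await store.adelete(ids=inserted_ids[2:], concurrency=5)

# Empty the collection but keep it around...
store.clear()
# ...or drop it from the database altogether (irreversible).
store.delete_collection()
```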
+ """ + await self._ensure_db_setup() + if not self.async_astra_db: + return await run_in_executor(None, self.delete_collection) + else: + await self.async_astra_db.delete_collection( + collection_name=self.collection_name, + ) + + @staticmethod + def _get_documents_to_insert( + texts: Iterable[str], + embedding_vectors: List[List[float]], + metadatas: Optional[List[dict]] = None, + ids: Optional[List[str]] = None, + ) -> List[DocDict]: + if ids is None: + ids = [uuid.uuid4().hex for _ in texts] + if metadatas is None: + metadatas = [{} for _ in texts] + # + documents_to_insert = [ + { + "content": b_txt, + "_id": b_id, + "$vector": b_emb, + "metadata": b_md, + } + for b_txt, b_emb, b_id, b_md in zip( + texts, + embedding_vectors, + ids, + metadatas, + ) + ] + # make unique by id, keeping the last + uniqued_documents_to_insert = _unique_list( + documents_to_insert[::-1], + lambda document: document["_id"], + )[::-1] + return uniqued_documents_to_insert + + @staticmethod + def _get_missing_from_batch( + document_batch: List[DocDict], insert_result: Dict[str, Any] + ) -> Tuple[List[str], List[DocDict]]: + if "status" not in insert_result: + raise ValueError( + f"API Exception while running bulk insertion: {str(insert_result)}" + ) + batch_inserted = insert_result["status"]["insertedIds"] + # estimation of the preexisting documents that failed + missed_inserted_ids = {document["_id"] for document in document_batch} - set( + batch_inserted + ) + errors = insert_result.get("errors", []) + # careful for other sources of error other than "doc already exists" + num_errors = len(errors) + unexpected_errors = any( + error.get("errorCode") != "DOCUMENT_ALREADY_EXISTS" for error in errors + ) + if num_errors != len(missed_inserted_ids) or unexpected_errors: + raise ValueError( + f"API Exception while running bulk insertion: {str(errors)}" + ) + # deal with the missing insertions as upserts + missing_from_batch = [ + document + for document in document_batch + if document["_id"] in missed_inserted_ids + ] + return batch_inserted, missing_from_batch + + def add_texts( + self, + texts: Iterable[str], + metadatas: Optional[List[dict]] = None, + ids: Optional[List[str]] = None, + *, + batch_size: Optional[int] = None, + batch_concurrency: Optional[int] = None, + overwrite_concurrency: Optional[int] = None, + **kwargs: Any, + ) -> List[str]: + """Run texts through the embeddings and add them to the vectorstore. + + If passing explicit ids, those entries whose id is in the store already + will be replaced. + + Args: + texts (Iterable[str]): Texts to add to the vectorstore. + metadatas (Optional[List[dict]], optional): Optional list of metadatas. + ids (Optional[List[str]], optional): Optional list of ids. + batch_size (Optional[int]): Number of documents in each API call. + Check the underlying Astra DB HTTP API specs for the max value + (20 at the time of writing this). If not provided, defaults + to the instance-level setting. + batch_concurrency (Optional[int]): number of threads to process + insertion batches concurrently. Defaults to instance-level + setting if not provided. + overwrite_concurrency (Optional[int]): number of threads to process + pre-existing documents in each batch (which require individual + API calls). Defaults to instance-level setting if not provided. + + A note on metadata: there are constraints on the allowed field names + in this dictionary, coming from the underlying Astra DB API. + For instance, the `$` (dollar sign) cannot be used in the dict keys. 
+ See this document for details: + docs.datastax.com/en/astra-serverless/docs/develop/dev-with-json.html + + Returns: + List[str]: List of ids of the added texts. + """ + + if kwargs: + warnings.warn( + "Method 'add_texts' of AstraDBVectorStore vector store invoked with " + f"unsupported arguments ({', '.join(sorted(kwargs.keys()))}), " + "which will be ignored." + ) + self._ensure_astra_db_client() + + embedding_vectors = self.embedding.embed_documents(list(texts)) + documents_to_insert = self._get_documents_to_insert( + texts, embedding_vectors, metadatas, ids + ) + + def _handle_batch(document_batch: List[DocDict]) -> List[str]: + # self.collection is not None (by _ensure_astra_db_client) + im_result = self.collection.insert_many( # type: ignore[union-attr] + documents=document_batch, + options={"ordered": False}, + partial_failures_allowed=True, + ) + batch_inserted, missing_from_batch = self._get_missing_from_batch( + document_batch, im_result + ) + + def _handle_missing_document(missing_document: DocDict) -> str: + # self.collection is not None (by _ensure_astra_db_client) + replacement_result = self.collection.find_one_and_replace( # type: ignore[union-attr] + filter={"_id": missing_document["_id"]}, + replacement=missing_document, + ) + return replacement_result["data"]["document"]["_id"] + + _u_max_workers = ( + overwrite_concurrency or self.bulk_insert_overwrite_concurrency + ) + with ThreadPoolExecutor(max_workers=_u_max_workers) as tpe2: + batch_replaced = list( + tpe2.map( + _handle_missing_document, + missing_from_batch, + ) + ) + return batch_inserted + batch_replaced + + _b_max_workers = batch_concurrency or self.bulk_insert_batch_concurrency + with ThreadPoolExecutor(max_workers=_b_max_workers) as tpe: + all_ids_nested = tpe.map( + _handle_batch, + batch_iterate( + batch_size or self.batch_size, + documents_to_insert, + ), + ) + return [iid for id_list in all_ids_nested for iid in id_list] + + async def aadd_texts( + self, + texts: Iterable[str], + metadatas: Optional[List[dict]] = None, + ids: Optional[List[str]] = None, + *, + batch_size: Optional[int] = None, + batch_concurrency: Optional[int] = None, + overwrite_concurrency: Optional[int] = None, + **kwargs: Any, + ) -> List[str]: + """Run texts through the embeddings and add them to the vectorstore. + + If passing explicit ids, those entries whose id is in the store already + will be replaced. + + Args: + texts (Iterable[str]): Texts to add to the vectorstore. + metadatas (Optional[List[dict]], optional): Optional list of metadatas. + ids (Optional[List[str]], optional): Optional list of ids. + batch_size (Optional[int]): Number of documents in each API call. + Check the underlying Astra DB HTTP API specs for the max value + (20 at the time of writing this). If not provided, defaults + to the instance-level setting. + batch_concurrency (Optional[int]): number of concurrent batch insertions. + Defaults to instance-level setting if not provided. + overwrite_concurrency (Optional[int]): number of concurrent API calls to + process pre-existing documents in each batch. + Defaults to instance-level setting if not provided. + + A note on metadata: there are constraints on the allowed field names + in this dictionary, coming from the underlying Astra DB API. + For instance, the `$` (dollar sign) cannot be used in the dict keys. + See this document for details: + docs.datastax.com/en/astra-serverless/docs/develop/dev-with-json.html + + Returns: + List[str]: List of ids of the added texts. 
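+
+        Example (illustrative sketch; ``vstore`` is assumed to be an
+        already-initialized ``AstraDBVectorStore``):
+
+        .. code-block:: python
+
+            # 'vstore' is a pre-existing AstraDBVectorStore (assumption).
+            inserted_ids = await vstore.aadd_texts(
+                texts=["first text", "second text"],
+                metadatas=[{"source": "a"}, {"source": "b"}],
+                batch_size=20,  # per-API-call batch size (see note above)
+            )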
+ """ + await self._ensure_db_setup() + if not self.async_collection: + return await super().aadd_texts( + texts, + metadatas, + ids=ids, + batch_size=batch_size, + batch_concurrency=batch_concurrency, + overwrite_concurrency=overwrite_concurrency, + ) + else: + if kwargs: + warnings.warn( + "Method 'aadd_texts' of AstraDBVectorStore invoked with " + f"unsupported arguments ({', '.join(sorted(kwargs.keys()))}), " + "which will be ignored." + ) + + embedding_vectors = await self.embedding.aembed_documents(list(texts)) + documents_to_insert = self._get_documents_to_insert( + texts, embedding_vectors, metadatas, ids + ) + + async def _handle_batch(document_batch: List[DocDict]) -> List[str]: + # self.async_collection is not None here for sure + im_result = await self.async_collection.insert_many( # type: ignore[union-attr] + documents=document_batch, + options={"ordered": False}, + partial_failures_allowed=True, + ) + batch_inserted, missing_from_batch = self._get_missing_from_batch( + document_batch, im_result + ) + + async def _handle_missing_document(missing_document: DocDict) -> str: + # self.async_collection is not None here for sure + replacement_result = ( + await self.async_collection.find_one_and_replace( # type: ignore[union-attr] + filter={"_id": missing_document["_id"]}, + replacement=missing_document, + ) + ) + return replacement_result["data"]["document"]["_id"] + + _u_max_workers = ( + overwrite_concurrency or self.bulk_insert_overwrite_concurrency + ) + batch_replaced = await gather_with_concurrency( + _u_max_workers, + *[_handle_missing_document(doc) for doc in missing_from_batch], + ) + return batch_inserted + batch_replaced + + _b_max_workers = batch_concurrency or self.bulk_insert_batch_concurrency + all_ids_nested = await gather_with_concurrency( + _b_max_workers, + *[ + _handle_batch(batch) + for batch in batch_iterate( + batch_size or self.batch_size, + documents_to_insert, + ) + ], + ) + + return [iid for id_list in all_ids_nested for iid in id_list] + + def similarity_search_with_score_id_by_vector( + self, + embedding: List[float], + k: int = 4, + filter: Optional[Dict[str, Any]] = None, + ) -> List[Tuple[Document, float, str]]: + """Return docs most similar to embedding vector. + + Args: + embedding (str): Embedding to look up documents similar to. + k (int): Number of Documents to return. Defaults to 4. + Returns: + List of (Document, score, id), the most similar to the query vector. + """ + self._ensure_astra_db_client() + metadata_parameter = self._filter_to_metadata(filter) + # + hits = list( + # self.collection is not None (by _ensure_astra_db_client) + self.collection.paginated_find( # type: ignore[union-attr] + filter=metadata_parameter, + sort={"$vector": embedding}, + options={"limit": k, "includeSimilarity": True}, + projection={ + "_id": 1, + "content": 1, + "metadata": 1, + }, + ) + ) + # + return [ + ( + Document( + page_content=hit["content"], + metadata=hit["metadata"], + ), + hit["$similarity"], + hit["_id"], + ) + for hit in hits + ] + + async def asimilarity_search_with_score_id_by_vector( + self, + embedding: List[float], + k: int = 4, + filter: Optional[Dict[str, Any]] = None, + ) -> List[Tuple[Document, float, str]]: + """Return docs most similar to embedding vector. + + Args: + embedding (str): Embedding to look up documents similar to. + k (int): Number of Documents to return. Defaults to 4. + Returns: + List of (Document, score, id), the most similar to the query vector. 
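+
+        Example (illustrative sketch; ``vstore`` is assumed to be an
+        already-initialized ``AstraDBVectorStore``, and the filter value is
+        an arbitrary metadata key/value pair):
+
+        .. code-block:: python
+
+            # 'vstore' is a pre-existing AstraDBVectorStore (assumption).
+            query_vector = await vstore.embedding.aembed_query("my question")
+            hits = await vstore.asimilarity_search_with_score_id_by_vector(
+                embedding=query_vector,
+                k=3,
+                filter={"source": "a"},  # optional metadata filter
+            )
+            for doc, score, doc_id in hits:
+                print(doc_id, score, doc.page_content[:40])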
+ """ + await self._ensure_db_setup() + if not self.async_collection: + return await run_in_executor( + None, + self.similarity_search_with_score_id_by_vector, + embedding, + k, + filter, + ) + metadata_parameter = self._filter_to_metadata(filter) + # + return [ + ( + Document( + page_content=hit["content"], + metadata=hit["metadata"], + ), + hit["$similarity"], + hit["_id"], + ) + async for hit in self.async_collection.paginated_find( + filter=metadata_parameter, + sort={"$vector": embedding}, + options={"limit": k, "includeSimilarity": True}, + projection={ + "_id": 1, + "content": 1, + "metadata": 1, + }, + ) + ] + + def similarity_search_with_score_id( + self, + query: str, + k: int = 4, + filter: Optional[Dict[str, Any]] = None, + ) -> List[Tuple[Document, float, str]]: + embedding_vector = self.embedding.embed_query(query) + return self.similarity_search_with_score_id_by_vector( + embedding=embedding_vector, + k=k, + filter=filter, + ) + + async def asimilarity_search_with_score_id( + self, + query: str, + k: int = 4, + filter: Optional[Dict[str, Any]] = None, + ) -> List[Tuple[Document, float, str]]: + embedding_vector = await self.embedding.aembed_query(query) + return await self.asimilarity_search_with_score_id_by_vector( + embedding=embedding_vector, + k=k, + filter=filter, + ) + + def similarity_search_with_score_by_vector( + self, + embedding: List[float], + k: int = 4, + filter: Optional[Dict[str, Any]] = None, + ) -> List[Tuple[Document, float]]: + """Return docs most similar to embedding vector. + + Args: + embedding (str): Embedding to look up documents similar to. + k (int): Number of Documents to return. Defaults to 4. + Returns: + List of (Document, score), the most similar to the query vector. + """ + return [ + (doc, score) + for (doc, score, doc_id) in self.similarity_search_with_score_id_by_vector( + embedding=embedding, + k=k, + filter=filter, + ) + ] + + async def asimilarity_search_with_score_by_vector( + self, + embedding: List[float], + k: int = 4, + filter: Optional[Dict[str, Any]] = None, + ) -> List[Tuple[Document, float]]: + """Return docs most similar to embedding vector. + + Args: + embedding (str): Embedding to look up documents similar to. + k (int): Number of Documents to return. Defaults to 4. + Returns: + List of (Document, score), the most similar to the query vector. 
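+
+        Example (illustrative sketch; ``vstore`` is assumed to be an
+        already-initialized ``AstraDBVectorStore``):
+
+        .. code-block:: python
+
+            # 'vstore' is a pre-existing AstraDBVectorStore (assumption).
+            query_vector = await vstore.embedding.aembed_query("my question")
+            docs_and_scores = await vstore.asimilarity_search_with_score_by_vector(
+                embedding=query_vector,
+                k=4,
+            )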
+ """ + return [ + (doc, score) + for ( + doc, + score, + doc_id, + ) in await self.asimilarity_search_with_score_id_by_vector( + embedding=embedding, + k=k, + filter=filter, + ) + ] + + def similarity_search( + self, + query: str, + k: int = 4, + filter: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> List[Document]: + embedding_vector = self.embedding.embed_query(query) + return self.similarity_search_by_vector( + embedding_vector, + k, + filter=filter, + ) + + async def asimilarity_search( + self, + query: str, + k: int = 4, + filter: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> List[Document]: + embedding_vector = await self.embedding.aembed_query(query) + return await self.asimilarity_search_by_vector( + embedding_vector, + k, + filter=filter, + ) + + def similarity_search_by_vector( + self, + embedding: List[float], + k: int = 4, + filter: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> List[Document]: + return [ + doc + for doc, _ in self.similarity_search_with_score_by_vector( + embedding, + k, + filter=filter, + ) + ] + + async def asimilarity_search_by_vector( + self, + embedding: List[float], + k: int = 4, + filter: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> List[Document]: + return [ + doc + for doc, _ in await self.asimilarity_search_with_score_by_vector( + embedding, + k, + filter=filter, + ) + ] + + def similarity_search_with_score( + self, + query: str, + k: int = 4, + filter: Optional[Dict[str, Any]] = None, + ) -> List[Tuple[Document, float]]: + embedding_vector = self.embedding.embed_query(query) + return self.similarity_search_with_score_by_vector( + embedding_vector, + k, + filter=filter, + ) + + async def asimilarity_search_with_score( + self, + query: str, + k: int = 4, + filter: Optional[Dict[str, Any]] = None, + ) -> List[Tuple[Document, float]]: + embedding_vector = await self.embedding.aembed_query(query) + return await self.asimilarity_search_with_score_by_vector( + embedding_vector, + k, + filter=filter, + ) + + @staticmethod + def _get_mmr_hits( + embedding: List[float], k: int, lambda_mult: float, prefetch_hits: List[DocDict] + ) -> List[Document]: + mmr_chosen_indices = maximal_marginal_relevance( + np.array(embedding, dtype=np.float32), + [prefetch_hit["$vector"] for prefetch_hit in prefetch_hits], + k=k, + lambda_mult=lambda_mult, + ) + mmr_hits = [ + prefetch_hit + for prefetch_index, prefetch_hit in enumerate(prefetch_hits) + if prefetch_index in mmr_chosen_indices + ] + return [ + Document( + page_content=hit["content"], + metadata=hit["metadata"], + ) + for hit in mmr_hits + ] + + def max_marginal_relevance_search_by_vector( + self, + embedding: List[float], + k: int = 4, + fetch_k: int = 20, + lambda_mult: float = 0.5, + filter: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> List[Document]: + """Return docs selected using the maximal marginal relevance. + Maximal marginal relevance optimizes for similarity to query AND diversity + among selected documents. + Args: + embedding: Embedding to look up documents similar to. + k: Number of Documents to return. + fetch_k: Number of Documents to fetch to pass to MMR algorithm. + lambda_mult: Number between 0 and 1 that determines the degree + of diversity among the results with 0 corresponding + to maximum diversity and 1 to minimum diversity. + Returns: + List of Documents selected by maximal marginal relevance. 
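+
+        Example (illustrative sketch; ``vstore`` is assumed to be an
+        already-initialized ``AstraDBVectorStore``):
+
+        .. code-block:: python
+
+            # 'vstore' is a pre-existing AstraDBVectorStore (assumption).
+            query_vector = vstore.embedding.embed_query("my question")
+            docs = vstore.max_marginal_relevance_search_by_vector(
+                embedding=query_vector,
+                k=4,
+                fetch_k=20,
+                lambda_mult=0.5,  # 0 = maximum diversity, 1 = minimum diversity
+            )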
+ """ + self._ensure_astra_db_client() + metadata_parameter = self._filter_to_metadata(filter) + + prefetch_hits = list( + # self.collection is not None (by _ensure_astra_db_client) + self.collection.paginated_find( # type: ignore[union-attr] + filter=metadata_parameter, + sort={"$vector": embedding}, + options={"limit": fetch_k, "includeSimilarity": True}, + projection={ + "_id": 1, + "content": 1, + "metadata": 1, + "$vector": 1, + }, + ) + ) + + return self._get_mmr_hits(embedding, k, lambda_mult, prefetch_hits) + + async def amax_marginal_relevance_search_by_vector( + self, + embedding: List[float], + k: int = 4, + fetch_k: int = 20, + lambda_mult: float = 0.5, + filter: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> List[Document]: + """Return docs selected using the maximal marginal relevance. + Maximal marginal relevance optimizes for similarity to query AND diversity + among selected documents. + Args: + embedding: Embedding to look up documents similar to. + k: Number of Documents to return. + fetch_k: Number of Documents to fetch to pass to MMR algorithm. + lambda_mult: Number between 0 and 1 that determines the degree + of diversity among the results with 0 corresponding + to maximum diversity and 1 to minimum diversity. + Returns: + List of Documents selected by maximal marginal relevance. + """ + await self._ensure_db_setup() + if not self.async_collection: + return await run_in_executor( + None, + self.max_marginal_relevance_search_by_vector, + embedding, + k, + fetch_k, + lambda_mult, + filter, + **kwargs, + ) + metadata_parameter = self._filter_to_metadata(filter) + + prefetch_hits = [ + hit + async for hit in self.async_collection.paginated_find( + filter=metadata_parameter, + sort={"$vector": embedding}, + options={"limit": fetch_k, "includeSimilarity": True}, + projection={ + "_id": 1, + "content": 1, + "metadata": 1, + "$vector": 1, + }, + ) + ] + + return self._get_mmr_hits(embedding, k, lambda_mult, prefetch_hits) + + def max_marginal_relevance_search( + self, + query: str, + k: int = 4, + fetch_k: int = 20, + lambda_mult: float = 0.5, + filter: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> List[Document]: + """Return docs selected using the maximal marginal relevance. + Maximal marginal relevance optimizes for similarity to query AND diversity + among selected documents. + Args: + query (str): Text to look up documents similar to. + k (int = 4): Number of Documents to return. + fetch_k (int = 20): Number of Documents to fetch to pass to MMR algorithm. + lambda_mult (float = 0.5): Number between 0 and 1 that determines the degree + of diversity among the results with 0 corresponding + to maximum diversity and 1 to minimum diversity. + Optional. + Returns: + List of Documents selected by maximal marginal relevance. + """ + embedding_vector = self.embedding.embed_query(query) + return self.max_marginal_relevance_search_by_vector( + embedding_vector, + k, + fetch_k, + lambda_mult=lambda_mult, + filter=filter, + ) + + async def amax_marginal_relevance_search( + self, + query: str, + k: int = 4, + fetch_k: int = 20, + lambda_mult: float = 0.5, + filter: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> List[Document]: + """Return docs selected using the maximal marginal relevance. + Maximal marginal relevance optimizes for similarity to query AND diversity + among selected documents. + Args: + query (str): Text to look up documents similar to. + k (int = 4): Number of Documents to return. 
+ fetch_k (int = 20): Number of Documents to fetch to pass to MMR algorithm. + lambda_mult (float = 0.5): Number between 0 and 1 that determines the degree + of diversity among the results with 0 corresponding + to maximum diversity and 1 to minimum diversity. + Optional. + Returns: + List of Documents selected by maximal marginal relevance. + """ + embedding_vector = await self.embedding.aembed_query(query) + return await self.amax_marginal_relevance_search_by_vector( + embedding_vector, + k, + fetch_k, + lambda_mult=lambda_mult, + filter=filter, + ) + + @classmethod + def _from_kwargs( + cls: Type[AstraDBVectorStore], + embedding: Embeddings, + **kwargs: Any, + ) -> AstraDBVectorStore: + known_kwargs = { + "collection_name", + "token", + "api_endpoint", + "astra_db_client", + "async_astra_db_client", + "namespace", + "metric", + "batch_size", + "bulk_insert_batch_concurrency", + "bulk_insert_overwrite_concurrency", + "bulk_delete_concurrency", + "batch_concurrency", + "overwrite_concurrency", + } + if kwargs: + unknown_kwargs = set(kwargs.keys()) - known_kwargs + if unknown_kwargs: + warnings.warn( + "Method 'from_texts' of AstraDBVectorStore vector store " + "invoked with unsupported arguments " + f"({', '.join(sorted(unknown_kwargs))}), " + "which will be ignored." + ) + + collection_name: str = kwargs["collection_name"] + token = kwargs.get("token") + api_endpoint = kwargs.get("api_endpoint") + astra_db_client = kwargs.get("astra_db_client") + async_astra_db_client = kwargs.get("async_astra_db_client") + namespace = kwargs.get("namespace") + metric = kwargs.get("metric") + + return cls( + embedding=embedding, + collection_name=collection_name, + token=token, + api_endpoint=api_endpoint, + astra_db_client=astra_db_client, + async_astra_db_client=async_astra_db_client, + namespace=namespace, + metric=metric, + batch_size=kwargs.get("batch_size"), + bulk_insert_batch_concurrency=kwargs.get("bulk_insert_batch_concurrency"), + bulk_insert_overwrite_concurrency=kwargs.get( + "bulk_insert_overwrite_concurrency" + ), + bulk_delete_concurrency=kwargs.get("bulk_delete_concurrency"), + ) + + @classmethod + def from_texts( + cls: Type[AstraDBVectorStore], + texts: List[str], + embedding: Embeddings, + metadatas: Optional[List[dict]] = None, + ids: Optional[List[str]] = None, + **kwargs: Any, + ) -> AstraDBVectorStore: + """Create an Astra DB vectorstore from raw texts. + + Args: + texts (List[str]): the texts to insert. + embedding (Embeddings): the embedding function to use in the store. + metadatas (Optional[List[dict]]): metadata dicts for the texts. + ids (Optional[List[str]]): ids to associate to the texts. + *Additional arguments*: you can pass any argument that you would + to 'add_texts' and/or to the 'AstraDBVectorStore' constructor + (see these methods for details). These arguments will be + routed to the respective methods as they are. + + Returns: + an `AstraDBVectorStore` vectorstore. 
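+
+        Example (illustrative sketch; the embeddings object and the credential
+        placeholders below are assumptions, not values from this codebase):
+
+        .. code-block:: python
+
+            # 'my_embeddings' is any LangChain Embeddings instance (assumption).
+            vstore = AstraDBVectorStore.from_texts(
+                texts=["first text", "second text"],
+                embedding=my_embeddings,
+                metadatas=[{"source": "a"}, {"source": "b"}],
+                collection_name="my_collection",
+                token="AstraCS:...",  # placeholder credentials
+                api_endpoint="https://<database-id>-<region>.apps.astra.datastax.com",
+            )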
+ """ + astra_db_store = AstraDBVectorStore._from_kwargs(embedding, **kwargs) + astra_db_store.add_texts( + texts=texts, + metadatas=metadatas, + ids=ids, + batch_size=kwargs.get("batch_size"), + batch_concurrency=kwargs.get("batch_concurrency"), + overwrite_concurrency=kwargs.get("overwrite_concurrency"), + ) + return astra_db_store + + @classmethod + async def afrom_texts( + cls: Type[AstraDBVectorStore], + texts: List[str], + embedding: Embeddings, + metadatas: Optional[List[dict]] = None, + ids: Optional[List[str]] = None, + **kwargs: Any, + ) -> AstraDBVectorStore: + """Create an Astra DB vectorstore from raw texts. + + Args: + texts (List[str]): the texts to insert. + embedding (Embeddings): the embedding function to use in the store. + metadatas (Optional[List[dict]]): metadata dicts for the texts. + ids (Optional[List[str]]): ids to associate to the texts. + *Additional arguments*: you can pass any argument that you would + to 'add_texts' and/or to the 'AstraDBVectorStore' constructor + (see these methods for details). These arguments will be + routed to the respective methods as they are. + + Returns: + an `AstraDBVectorStore` vectorstore. + """ + astra_db_store = AstraDBVectorStore._from_kwargs(embedding, **kwargs) + await astra_db_store.aadd_texts( + texts=texts, + metadatas=metadatas, + ids=ids, + batch_size=kwargs.get("batch_size"), + batch_concurrency=kwargs.get("batch_concurrency"), + overwrite_concurrency=kwargs.get("overwrite_concurrency"), + ) + return astra_db_store + + @classmethod + def from_documents( + cls: Type[AstraDBVectorStore], + documents: List[Document], + embedding: Embeddings, + **kwargs: Any, + ) -> AstraDBVectorStore: + """Create an Astra DB vectorstore from a document list. + + Utility method that defers to 'from_texts' (see that one). + + Args: see 'from_texts', except here you have to supply 'documents' + in place of 'texts' and 'metadatas'. + + Returns: + an `AstraDBVectorStore` vectorstore. + """ + return super().from_documents(documents, embedding, **kwargs) diff --git a/libs/partners/astradb/poetry.lock b/libs/partners/astradb/poetry.lock new file mode 100644 index 0000000000000..41c52a6e98ce4 --- /dev/null +++ b/libs/partners/astradb/poetry.lock @@ -0,0 +1,1114 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
+ +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} + +[[package]] +name = "anyio" +version = "4.2.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "astrapy" +version = "0.7.5" +description = "AstraPy is a Pythonic SDK for DataStax Astra" +optional = false +python-versions = ">=3.8.0,<4.0.0" +files = [ + {file = "astrapy-0.7.5-py3-none-any.whl", hash = "sha256:51daabbc59ed56f023233296e42372cd2a7468282f978c36ccc33a2e211beddc"}, + {file = "astrapy-0.7.5.tar.gz", hash = "sha256:72a31538c5fd06cbf91a235924bee81007d03b8c0feff1d7cf811e64a2adc7a8"}, +] + +[package.dependencies] +cassio = ">=0.1.4,<0.2.0" +deprecation = ">=2.1.0,<2.2.0" +httpx = {version = ">=0.26.0,<0.27.0", extras = ["http2"]} +toml = ">=0.10.2,<0.11.0" + +[[package]] +name = "cassandra-driver" +version = "3.29.0" +description = "DataStax Driver for Apache Cassandra" +optional = false +python-versions = "*" +files = [ + {file = "cassandra-driver-3.29.0.tar.gz", hash = "sha256:0a34f9534356e5fd33af8cdda109d5e945b6335cb50399b267c46368c4e93c98"}, + {file = "cassandra_driver-3.29.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:28d6fe5379d55e4fc96785bd2e2cba029ef171cc43fb38fc507b9ba232917ac2"}, + {file = "cassandra_driver-3.29.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:05e267412ccc9fe71ee4a81d98f2250df2429390fac4721f41dd17b65e4c41ac"}, + {file = "cassandra_driver-3.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:84eacfc8e6461590eb1c2b9651ea809be298eb8283c2d844a6dad8058ee7928c"}, + {file = "cassandra_driver-3.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8feeda01bb13dce1a74b0a94172b3b06b0d9d8f33d6fb56e1910d495b0e085e5"}, + {file = "cassandra_driver-3.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bb0ef3297255bbade7b0c2d168c31d36ec904b1a9b42521d1d3d65c3148bbc7"}, + {file = "cassandra_driver-3.29.0-cp310-cp310-win32.whl", hash = "sha256:39d78971a4e26ef65b77caa09c0e6ccfd7b2c52b0924c328fbfdca91667eb08e"}, + {file = "cassandra_driver-3.29.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:9dd713fe6388f3ba141cc2eef4737b5e4a27b0d1c1a6b0372b8ff3d2d35ccf79"}, + {file = "cassandra_driver-3.29.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:76333d38cb663065d53ca658e15185b23aa0ce434f2876c455624d90d2ee0dbf"}, + {file = "cassandra_driver-3.29.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81ce92e0420bf18742b4bc433052c7c2e4aa72fa84898be2b26083e240ace343"}, + {file = "cassandra_driver-3.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b90c2f052a102560e4fcf860f6d1ac35d3514ad36b1978cf821998f1e689f38"}, + {file = "cassandra_driver-3.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fecf584a7f411d247d1925c66d527f7ecc73710b230b68cdacf2044fb57ae4b"}, + {file = "cassandra_driver-3.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a678bc7107cc606ac8ff8cb58fe6abb0bb2a9ff5196016b3bd36926146c4dc62"}, + {file = "cassandra_driver-3.29.0-cp311-cp311-win32.whl", hash = "sha256:e9badede26005fd993e2344e8a541a4133bc46a76a90969d57a90a028b2b8ca6"}, + {file = "cassandra_driver-3.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:cac6d2e6ad1a386f1b786de78102f918bcd5caac390c3e446558e5adee9464c6"}, + {file = "cassandra_driver-3.29.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:01a8b4cdb056c209c5c4aa22e0d7f427b87cb98297a6efff29ea278da9a52698"}, + {file = "cassandra_driver-3.29.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:73aa7b32dfad1f58fb00167052ab80b1b186b69baac7de4ad5cca785fff569be"}, + {file = "cassandra_driver-3.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7f7c446edba002b0fdd94f2b92c4752e16738ea7dce27d754103fcd086b4dcc9"}, + {file = "cassandra_driver-3.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6843569360fb4a446d65f6733faed1207c252745a31a1d8dc02feff8f7f86a23"}, + {file = "cassandra_driver-3.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1762d228bdd3f1bc5faa0812e1fcac75a36ab7504f3cfb7e9b5d2bf26a50c552"}, + {file = "cassandra_driver-3.29.0-cp312-cp312-win32.whl", hash = "sha256:dd245817e0df048b780f45ac78b1840fe12deb5aea8873df4a11e0c44a68c19a"}, + {file = "cassandra_driver-3.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:002134a4152034ed66d9f9616ea391f44dfdf7c9f97d22bd4d4f64d70020b91b"}, + {file = "cassandra_driver-3.29.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d9b652db99f69ee62bbd679a29cfbab398ebf2bfc195632d57ecb3f246baf48b"}, + {file = "cassandra_driver-3.29.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6ac82ae8240b4f4f1a3d1e6f21a4ecd9948afdfedef6f23235fac85d20d11076"}, + {file = "cassandra_driver-3.29.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1590d231503594546dfdbf6119d805e1a0b22de98a1a6ec0de79a1bacc59ecb5"}, + {file = "cassandra_driver-3.29.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcf9dee3b120062a8224278da56ab088c2c081a79dc9e017f065dccd421b6477"}, + {file = "cassandra_driver-3.29.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb9a123ad86152d2a1ca31f4a3d91d72cbd3ed7a88a4c3cd5f6f72173a1bfbd8"}, + {file = "cassandra_driver-3.29.0-cp38-cp38-win32.whl", hash = "sha256:cc6794ca9c94e157570e2b7b5a04458259ee29c5a0d0de50a9e0c8e2da8f5455"}, + {file = "cassandra_driver-3.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:096eef84ab466b090a69a4e9d85e65d57e926ff7d7897443e7b637d40277f373"}, + {file = "cassandra_driver-3.29.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:befb723d62ee650cb3afd9668245ee9ce6ba5394dbd58352866ff2baa0324101"}, + {file = "cassandra_driver-3.29.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4108fb2a64a8fd77948003ff0ca4d296364d9ff7381f4abe7a9db202e6378446"}, + {file = "cassandra_driver-3.29.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cd4701cc083e047553888dbd99d2d5119b5b3da54b9e8034a80b8c8d516142c"}, + {file = "cassandra_driver-3.29.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7b94c5273bf3c2f252aed8624303c46d9d4e6dc7663f53ed9c9335e5d0dcb88"}, + {file = "cassandra_driver-3.29.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3609f2eda8ee2a6a9b2c9c84c009bf54a7695b9dfc21700b88dd0a2140c82c95"}, + {file = "cassandra_driver-3.29.0-cp39-cp39-win32.whl", hash = "sha256:aaeff4c3af3100510e329177c46da89aab6d444070f4fa370df5328b8ad488b4"}, + {file = "cassandra_driver-3.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:88d9a6abd0e0af199636ff9386d0b9b81b1dd189e22c8498ecaa546256bacf24"}, +] + +[package.dependencies] +geomet = ">=0.1,<0.3" + +[package.extras] +cle = ["cryptography (>=35.0)"] +graph = ["gremlinpython (==3.4.6)"] + +[[package]] +name = "cassio" +version = "0.1.4" +description = "A framework-agnostic Python library to seamlessly integrate Apache Cassandra(R) with ML/LLM/genAI workloads." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cassio-0.1.4-py3-none-any.whl", hash = "sha256:ab997879c36807ff5b9771ff35941f104c0f0e60e1595118279869b5b95c146f"}, + {file = "cassio-0.1.4.tar.gz", hash = "sha256:df495c459ee5e9194e4780ac3ea1aaf79a4443e6d06f0eeb67aac6e3cd8c47aa"}, +] + +[package.dependencies] +cassandra-driver = ">=3.28.0" +numpy = ">=1.0" +requests = ">=2" + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "codespell" +version = "2.2.6" +description = "Codespell" +optional = false +python-versions = ">=3.8" +files = [ + {file = "codespell-2.2.6-py3-none-any.whl", hash = "sha256:9ee9a3e5df0990604013ac2a9f22fa8e57669c827124a2e961fe8a1da4cacc07"}, + {file = "codespell-2.2.6.tar.gz", hash = "sha256:a8c65d8eb3faa03deabab6b3bbe798bea72e1799c7e9e955d57eca4096abcff9"}, +] + +[package.extras] +dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] +hard-encoding-detection = ["chardet"] +toml = ["tomli"] +types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "deprecation" +version = "2.1.0" +description = "A library to handle automated deprecations" +optional = false +python-versions = "*" +files = [ + {file = "deprecation-2.1.0-py2.py3-none-any.whl", hash = "sha256:a10811591210e1fb0e768a8c25517cabeabcba6f0bf96564f8ff45189f90b14a"}, + {file = "deprecation-2.1.0.tar.gz", hash = "sha256:72b3bde64e5d778694b0cf68178aed03d15e15477116add3fb773e581f9518ff"}, +] + +[package.dependencies] +packaging = "*" + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + +[[package]] +name = "geomet" +version = "0.2.1.post1" +description = "GeoJSON <-> WKT/WKB conversion utilities" +optional = false +python-versions = ">2.6, !=3.3.*, <4" +files = [ + {file = "geomet-0.2.1.post1-py3-none-any.whl", hash = "sha256:a41a1e336b381416d6cbed7f1745c848e91defaa4d4c1bdc1312732e46ffad2b"}, + {file = "geomet-0.2.1.post1.tar.gz", hash = "sha256:91d754f7c298cbfcabd3befdb69c641c27fe75e808b27aa55028605761d17e95"}, +] + +[package.dependencies] +click = "*" +six = "*" + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "h2" +version = "4.1.0" +description = "HTTP/2 State-Machine based protocol implementation" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] + +[package.dependencies] +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" + +[[package]] +name = "hpack" +version = "4.0.0" +description = "Pure-Python HPACK header compression" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = 
"sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] + +[[package]] +name = "httpcore" +version = "1.0.3" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.3-py3-none-any.whl", hash = "sha256:9a6a501c3099307d9fd76ac244e08503427679b1e81ceb1d922485e2f2462ad2"}, + {file = "httpcore-1.0.3.tar.gz", hash = "sha256:5c0f9546ad17dac4d0772b0808856eb616eb8b48ce94f49ed819fd6982a8a544"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.24.0)"] + +[[package]] +name = "httpx" +version = "0.26.0" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.26.0-py3-none-any.whl", hash = "sha256:8915f5a3627c4d47b73e8202457cb28f1266982d1159bd5779d86a80c0eab1cd"}, + {file = "httpx-0.26.0.tar.gz", hash = "sha256:451b55c30d5185ea6b23c2c793abf9bb237d2a7dfb901ced6ff69ad37ec1dfaf"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "hyperframe" +version = "6.0.1" +description = "HTTP/2 framing layer for Python" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = 
"sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "langchain-core" +version = "0.1.23" +description = "Building applications with LLMs through composability" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [] +develop = true + +[package.dependencies] +anyio = ">=3,<5" +jsonpatch = "^1.33" +langsmith = "^0.1.0" +packaging = "^23.2" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +requests = "^2" +tenacity = "^8.1.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[package.source] +type = "directory" +url = "../../core" + +[[package]] +name = "langsmith" +version = "0.1.1" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langsmith-0.1.1-py3-none-any.whl", hash = "sha256:10ff2b977a41e3f6351d1a4239d9bd57af0547aa909e839d2791e16cc197a6f9"}, + {file = "langsmith-0.1.1.tar.gz", hash = "sha256:09df0c2ca9085105f97a4e4f281b083e312c99d162f3fe2b2d5eefd5c3692e60"}, +] + +[package.dependencies] +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "mypy" +version = "0.991" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, + {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, + {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, + {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"}, + {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"}, + {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"}, + {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"}, + {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"}, + {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"}, + {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"}, + {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"}, + {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"}, + {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"}, + {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"}, + {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"}, + {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"}, + {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"}, + {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"}, + {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"}, + {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"}, + {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"}, + {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"}, + {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"}, + {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"}, + {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"}, + {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"}, + {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"}, + {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"}, + {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, + {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, +] + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.24.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c0bfb52d2169d58c1cdb8cc1f16989101639b34c7d3ce60ed70b19c63eba0b64"}, + {file = "numpy-1.24.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ed094d4f0c177b1b8e7aa9cba7d6ceed51c0e569a5318ac0ca9a090680a6a1b1"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79fc682a374c4a8ed08b331bef9c5f582585d1048fa6d80bc6c35bc384eee9b4"}, + {file = "numpy-1.24.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ffe43c74893dbf38c2b0a1f5428760a1a9c98285553c89e12d70a96a7f3a4d6"}, + {file = "numpy-1.24.4-cp310-cp310-win32.whl", hash = "sha256:4c21decb6ea94057331e111a5bed9a79d335658c27ce2adb580fb4d54f2ad9bc"}, + {file = "numpy-1.24.4-cp310-cp310-win_amd64.whl", hash = "sha256:b4bea75e47d9586d31e892a7401f76e909712a0fd510f58f5337bea9572c571e"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f136bab9c2cfd8da131132c2cf6cc27331dd6fae65f95f69dcd4ae3c3639c810"}, + {file = "numpy-1.24.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e2926dac25b313635e4d6cf4dc4e51c8c0ebfed60b801c799ffc4c32bf3d1254"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:222e40d0e2548690405b0b3c7b21d1169117391c2e82c378467ef9ab4c8f0da7"}, + {file = "numpy-1.24.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7215847ce88a85ce39baf9e89070cb860c98fdddacbaa6c0da3ffb31b3350bd5"}, + {file = "numpy-1.24.4-cp311-cp311-win32.whl", hash = "sha256:4979217d7de511a8d57f4b4b5b2b965f707768440c17cb70fbf254c4b225238d"}, + {file = "numpy-1.24.4-cp311-cp311-win_amd64.whl", hash = "sha256:b7b1fc9864d7d39e28f41d089bfd6353cb5f27ecd9905348c24187a768c79694"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1452241c290f3e2a312c137a9999cdbf63f78864d63c79039bda65ee86943f61"}, + {file = "numpy-1.24.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:04640dab83f7c6c85abf9cd729c5b65f1ebd0ccf9de90b270cd61935eef0197f"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5425b114831d1e77e4b5d812b69d11d962e104095a5b9c3b641a218abcc050e"}, + {file = "numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd80e219fd4c71fc3699fc1dadac5dcf4fd882bfc6f7ec53d30fa197b8ee22dc"}, + {file = "numpy-1.24.4-cp38-cp38-win32.whl", hash = "sha256:4602244f345453db537be5314d3983dbf5834a9701b7723ec28923e2889e0bb2"}, + {file = "numpy-1.24.4-cp38-cp38-win_amd64.whl", hash = "sha256:692f2e0f55794943c5bfff12b3f56f99af76f902fc47487bdfe97856de51a706"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2541312fbf09977f3b3ad449c4e5f4bb55d0dbf79226d7724211acc905049400"}, + {file = "numpy-1.24.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9667575fb6d13c95f1b36aca12c5ee3356bf001b714fc354eb5465ce1609e62f"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f3a86ed21e4f87050382c7bc96571755193c4c1392490744ac73d660e8f564a9"}, + {file = "numpy-1.24.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d11efb4dbecbdf22508d55e48d9c8384db795e1b7b51ea735289ff96613ff74d"}, + {file = "numpy-1.24.4-cp39-cp39-win32.whl", hash = "sha256:6620c0acd41dbcb368610bb2f4d83145674040025e5536954782467100aa8835"}, + {file = "numpy-1.24.4-cp39-cp39-win_amd64.whl", hash = "sha256:befe2bf740fd8373cf56149a5c23a0f601e82869598d41f8e188a0e9869926f8"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:31f13e25b4e304632a4619d0e0777662c2ffea99fcae2029556b17d8ff958aef"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95f7ac6540e95bc440ad77f56e520da5bf877f87dca58bd095288dce8940532a"}, + {file = "numpy-1.24.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e98f220aa76ca2a977fe435f5b04d7b3470c0a2e6312907b37ba6068f26787f2"}, + {file = "numpy-1.24.4.tar.gz", hash = "sha256:80f5e3a4e498641401868df4208b74581206afbee7cf7b8329daae82676d9463"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pydantic" +version = "2.6.1" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.16.2" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.16.2" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", 
hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.21.1" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, + {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = "pytest-dotenv" +version = "0.5.2" +description = "A py.test plugin that parses environment files before running tests" +optional = false +python-versions = "*" +files = [ + {file = "pytest-dotenv-0.5.2.tar.gz", hash = "sha256:2dc6c3ac6d8764c71c6d2804e902d0ff810fa19692e95fe138aefc9b1aa73732"}, + {file = "pytest_dotenv-0.5.2-py3-none-any.whl", hash = "sha256:40a2cece120a213898afaa5407673f6bd924b1fa7eafce6bda0e8abffe2f710f"}, +] + +[package.dependencies] +pytest = ">=5.0.0" +python-dotenv = ">=0.9.1" + +[[package]] +name = "pytest-mock" 
+version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "pytest-watcher" +version = "0.3.5" +description = "Automatically rerun your tests on file modifications" +optional = false +python-versions = ">=3.7.0,<4.0.0" +files = [ + {file = "pytest_watcher-0.3.5-py3-none-any.whl", hash = "sha256:af00ca52c7be22dc34c0fd3d7ffef99057207a73b05dc5161fe3b2fe91f58130"}, + {file = "pytest_watcher-0.3.5.tar.gz", hash = "sha256:8896152460ba2b1a8200c12117c6611008ec96c8b2d811f0a05ab8a82b043ff8"}, +] + +[package.dependencies] +tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""} +watchdog = ">=2.0.0" + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", 
hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "ruff" +version = "0.1.15" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "syrupy" +version = "4.6.1" +description = "Pytest Snapshot Test 
Utility" +optional = false +python-versions = ">=3.8.1,<4" +files = [ + {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, + {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9.0.0" + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typing-extensions" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, +] + +[[package]] +name = "urllib3" +version = "2.2.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "watchdog" +version = "4.0.0" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.8" +files = [ + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = 
"sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.8.1,<4.0" +content-hash = "fe6988cc6483f13341578e46b85a42fe242c21c5fdbe4f1f64384369b916b186" diff --git a/libs/partners/astradb/pyproject.toml b/libs/partners/astradb/pyproject.toml new file mode 100644 index 0000000000000..3ba131e0a59f7 --- /dev/null +++ b/libs/partners/astradb/pyproject.toml @@ -0,0 +1,89 @@ +[tool.poetry] +name = "langchain-astradb" +version = "0.0.1" +description = "An integration package connecting Astra DB and LangChain" +authors = [] +readme = "README.md" + +[tool.poetry.dependencies] +python = ">=3.8.1,<4.0" +langchain-core = "^0.1.5" +astrapy = "^0.7.5" +numpy = "^1" + +[tool.poetry.group.test] +optional = true + +[tool.poetry.group.test.dependencies] +pytest = "^7.3.0" +pytest-dotenv = "^0.5.2" +freezegun = "^1.2.2" +pytest-mock = "^3.10.0" +syrupy = "^4.0.2" +pytest-watcher = "^0.3.4" +pytest-asyncio = "^0.21.1" +langchain-core = { path = "../../core", develop = true } + +[tool.poetry.group.codespell] +optional = true + +[tool.poetry.group.codespell.dependencies] +codespell = "^2.2.0" + +[tool.poetry.group.test_integration] +optional = true + +[tool.poetry.group.test_integration.dependencies] + +[tool.poetry.group.lint] +optional = true + +[tool.poetry.group.lint.dependencies] +ruff = "^0.1.5" + +[tool.poetry.group.typing.dependencies] +mypy = "^0.991" +langchain-core = { path = "../../core", develop = true } + +[tool.poetry.group.dev] +optional = true + +[tool.poetry.group.dev.dependencies] +langchain-core = { path = "../../core", develop = true } + +[tool.ruff] +select = [ + "E", # pycodestyle + "F", # pyflakes + "I", # isort +] + +[tool.mypy] +disallow_untyped_defs = "True" + +[tool.coverage.run] +omit = ["tests/*"] + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +# --strict-markers will raise errors on unknown marks. 
+# https://docs.pytest.org/en/7.1.x/how-to/mark.html#raising-errors-on-unknown-marks + +# https://docs.pytest.org/en/7.1.x/reference/reference.html +# --strict-config any warnings encountered while parsing the `pytest` +# section of the configuration file raise errors. +# +# https://github.com/tophat/syrupy +# --snapshot-warn-unused Prints a warning on unused snapshots rather than fail the test suite. +addopts = "--snapshot-warn-unused --strict-markers --strict-config --durations=5" +# Registering custom markers. +# https://docs.pytest.org/en/7.1.x/example/markers.html#registering-markers +markers = [ + "requires: mark tests as requiring a specific library", + "asyncio: mark tests as requiring asyncio", + "compile: mark placeholder test used to compile integration tests without running them", +] +asyncio_mode = "auto" diff --git a/libs/partners/astradb/scripts/check_imports.py b/libs/partners/astradb/scripts/check_imports.py new file mode 100644 index 0000000000000..fd21a4975b7f0 --- /dev/null +++ b/libs/partners/astradb/scripts/check_imports.py @@ -0,0 +1,17 @@ +import sys +import traceback +from importlib.machinery import SourceFileLoader + +if __name__ == "__main__": + files = sys.argv[1:] + has_failure = False + for file in files: + try: + SourceFileLoader("x", file).load_module() + except Exception: + has_failure = True + print(file) + traceback.print_exc() + print() + + sys.exit(1 if has_failure else 0) diff --git a/libs/partners/astradb/scripts/check_pydantic.sh b/libs/partners/astradb/scripts/check_pydantic.sh new file mode 100755 index 0000000000000..06b5bb81ae236 --- /dev/null +++ b/libs/partners/astradb/scripts/check_pydantic.sh @@ -0,0 +1,27 @@ +#!/bin/bash +# +# This script searches for lines starting with "import pydantic" or "from pydantic" +# in tracked files within a Git repository. +# +# Usage: ./scripts/check_pydantic.sh /path/to/repository + +# Check if a path argument is provided +if [ $# -ne 1 ]; then + echo "Usage: $0 /path/to/repository" + exit 1 +fi + +repository_path="$1" + +# Search for lines matching the pattern within the specified repository +result=$(git -C "$repository_path" grep -E '^import pydantic|^from pydantic') + +# Check if any matching lines were found +if [ -n "$result" ]; then + echo "ERROR: The following lines need to be updated:" + echo "$result" + echo "Please replace the code with an import from langchain_core.pydantic_v1." + echo "For example, replace 'from pydantic import BaseModel'" + echo "with 'from langchain_core.pydantic_v1 import BaseModel'" + exit 1 +fi diff --git a/libs/partners/astradb/scripts/lint_imports.sh b/libs/partners/astradb/scripts/lint_imports.sh new file mode 100755 index 0000000000000..695613c7ba8fd --- /dev/null +++ b/libs/partners/astradb/scripts/lint_imports.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +set -eu + +# Initialize a variable to keep track of errors +errors=0 + +# make sure not importing from langchain or langchain_experimental +git --no-pager grep '^from langchain\.' . && errors=$((errors+1)) +git --no-pager grep '^from langchain_experimental\.' .
&& errors=$((errors+1)) + +# Decide on an exit status based on the errors +if [ "$errors" -gt 0 ]; then + exit 1 +else + exit 0 +fi diff --git a/libs/partners/astradb/tests/__init__.py b/libs/partners/astradb/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/astradb/tests/integration_tests/.env.example b/libs/partners/astradb/tests/integration_tests/.env.example new file mode 100644 index 0000000000000..4259d87682c34 --- /dev/null +++ b/libs/partners/astradb/tests/integration_tests/.env.example @@ -0,0 +1,5 @@ +# astra db +ASTRA_DB_API_ENDPOINT=https://your_astra_db_id-your_region.apps.astra.datastax.com +ASTRA_DB_APPLICATION_TOKEN=AstraCS:your_astra_db_application_token +# ASTRA_DB_KEYSPACE=your_astra_db_namespace +# ASTRA_DB_SKIP_COLLECTION_DELETIONS=1 diff --git a/libs/partners/astradb/tests/integration_tests/__init__.py b/libs/partners/astradb/tests/integration_tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/astradb/tests/integration_tests/conftest.py b/libs/partners/astradb/tests/integration_tests/conftest.py new file mode 100644 index 0000000000000..02b518e8695a2 --- /dev/null +++ b/libs/partners/astradb/tests/integration_tests/conftest.py @@ -0,0 +1,19 @@ +# Getting the absolute path of the current file's directory +import os + +ABS_PATH = os.path.dirname(os.path.abspath(__file__)) + +# Getting the absolute path of the project's root directory +PROJECT_DIR = os.path.abspath(os.path.join(ABS_PATH, os.pardir, os.pardir)) + + +# Loading the .env file if it exists +def _load_env() -> None: + dotenv_path = os.path.join(PROJECT_DIR, "tests", "integration_tests", ".env") + if os.path.exists(dotenv_path): + from dotenv import load_dotenv + + load_dotenv(dotenv_path) + + +_load_env() diff --git a/libs/partners/astradb/tests/integration_tests/test_compile.py b/libs/partners/astradb/tests/integration_tests/test_compile.py new file mode 100644 index 0000000000000..33ecccdfa0fbd --- /dev/null +++ b/libs/partners/astradb/tests/integration_tests/test_compile.py @@ -0,0 +1,7 @@ +import pytest + + +@pytest.mark.compile +def test_placeholder() -> None: + """Used for compiling integration tests without running any real tests.""" + pass diff --git a/libs/partners/astradb/tests/integration_tests/vectorstores/test_astradb.py b/libs/partners/astradb/tests/integration_tests/vectorstores/test_astradb.py new file mode 100644 index 0000000000000..de68c016a9809 --- /dev/null +++ b/libs/partners/astradb/tests/integration_tests/vectorstores/test_astradb.py @@ -0,0 +1,869 @@ +""" +Test of Astra DB vector store class `AstraDBVectorStore` + +Required to run this test: + - a recent `astrapy` Python package available + - an Astra DB instance; + - the two environment variables set: + export ASTRA_DB_API_ENDPOINT="https://-us-east1.apps.astra.datastax.com" + export ASTRA_DB_APPLICATION_TOKEN="AstraCS:........." + - optionally this as well (otherwise defaults are used): + export ASTRA_DB_KEYSPACE="my_keyspace" + - optionally: + export ASTRA_DB_SKIP_COLLECTION_DELETIONS="0" ("0" = full tests with collection deletions, default; "1" = skip deletions) +""" + +import json +import math +import os +from typing import Iterable, List, Optional, TypedDict + +import pytest +from langchain_core.documents import Document +from langchain_core.embeddings import Embeddings + +from langchain_astradb.vectorstores import AstraDBVectorStore + +# Faster testing (no actual collection deletions).
Off by default (=full tests) +SKIP_COLLECTION_DELETE = ( + int(os.environ.get("ASTRA_DB_SKIP_COLLECTION_DELETIONS", "0")) != 0 +) + +COLLECTION_NAME_DIM2 = "lc_test_d2" +COLLECTION_NAME_DIM2_EUCLIDEAN = "lc_test_d2_eucl" + +MATCH_EPSILON = 0.0001 + +# Ad-hoc embedding classes: + + +class AstraDBCredentials(TypedDict): + token: str + api_endpoint: str + namespace: Optional[str] + + +class SomeEmbeddings(Embeddings): + """ + Turn a sentence into an embedding vector in some way. + Not important how. It is deterministic is all that counts. + """ + + def __init__(self, dimension: int) -> None: + self.dimension = dimension + + def embed_documents(self, texts: List[str]) -> List[List[float]]: + return [self.embed_query(txt) for txt in texts] + + async def aembed_documents(self, texts: List[str]) -> List[List[float]]: + return self.embed_documents(texts) + + def embed_query(self, text: str) -> List[float]: + unnormed0 = [ord(c) for c in text[: self.dimension]] + unnormed = (unnormed0 + [1] + [0] * (self.dimension - 1 - len(unnormed0)))[ + : self.dimension + ] + norm = sum(x * x for x in unnormed) ** 0.5 + normed = [x / norm for x in unnormed] + return normed + + async def aembed_query(self, text: str) -> List[float]: + return self.embed_query(text) + + +class ParserEmbeddings(Embeddings): + """ + Parse input texts: if they are json for a List[float], fine. + Otherwise, return all zeros and call it a day. + """ + + def __init__(self, dimension: int) -> None: + self.dimension = dimension + + def embed_documents(self, texts: List[str]) -> List[List[float]]: + return [self.embed_query(txt) for txt in texts] + + async def aembed_documents(self, texts: List[str]) -> List[List[float]]: + return self.embed_documents(texts) + + def embed_query(self, text: str) -> List[float]: + try: + vals = json.loads(text) + assert len(vals) == self.dimension + return vals + except Exception: + print(f'[ParserEmbeddings] Returning a moot vector for "{text}"') + return [0.0] * self.dimension + + async def aembed_query(self, text: str) -> List[float]: + return self.embed_query(text) + + +def _has_env_vars() -> bool: + return all( + [ + "ASTRA_DB_APPLICATION_TOKEN" in os.environ, + "ASTRA_DB_API_ENDPOINT" in os.environ, + ] + ) + + +@pytest.fixture(scope="session") +def astradb_credentials() -> Iterable[AstraDBCredentials]: + yield { + "token": os.environ["ASTRA_DB_APPLICATION_TOKEN"], + "api_endpoint": os.environ["ASTRA_DB_API_ENDPOINT"], + "namespace": os.environ.get("ASTRA_DB_KEYSPACE"), + } + + +@pytest.fixture(scope="function") +def store_someemb( + astradb_credentials: AstraDBCredentials, +) -> Iterable[AstraDBVectorStore]: + emb = SomeEmbeddings(dimension=2) + v_store = AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ) + v_store.clear() + + yield v_store + + if not SKIP_COLLECTION_DELETE: + v_store.delete_collection() + else: + v_store.clear() + + +@pytest.fixture(scope="function") +def store_parseremb( + astradb_credentials: AstraDBCredentials, +) -> Iterable[AstraDBVectorStore]: + emb = ParserEmbeddings(dimension=2) + v_store = AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ) + v_store.clear() + + yield v_store + + if not SKIP_COLLECTION_DELETE: + v_store.delete_collection() + else: + v_store.clear() + + +@pytest.mark.requires("astrapy") +@pytest.mark.skipif(not _has_env_vars(), reason="Missing Astra DB env. 
vars") +class TestAstraDBVectorStore: + def test_astradb_vectorstore_create_delete( + self, astradb_credentials: AstraDBCredentials + ) -> None: + """Create and delete.""" + from astrapy.db import AstraDB as LibAstraDB + + emb = SomeEmbeddings(dimension=2) + # creation by passing the connection secrets + v_store = AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ) + v_store.add_texts("Sample 1") + if not SKIP_COLLECTION_DELETE: + v_store.delete_collection() + else: + v_store.clear() + + # Creation by passing a ready-made astrapy client: + astra_db_client = LibAstraDB( + **astradb_credentials, + ) + v_store_2 = AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + astra_db_client=astra_db_client, + ) + v_store_2.add_texts("Sample 2") + if not SKIP_COLLECTION_DELETE: + v_store_2.delete_collection() + else: + v_store_2.clear() + + async def test_astradb_vectorstore_create_delete_async( + self, astradb_credentials: AstraDBCredentials + ) -> None: + """Create and delete.""" + emb = SomeEmbeddings(dimension=2) + # creation by passing the connection secrets + v_store = AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ) + await v_store.adelete_collection() + # Creation by passing a ready-made astrapy client: + from astrapy.db import AsyncAstraDB + + astra_db_client = AsyncAstraDB( + **astradb_credentials, + ) + v_store_2 = AstraDBVectorStore( + embedding=emb, + collection_name="lc_test_2_async", + async_astra_db_client=astra_db_client, + ) + if not SKIP_COLLECTION_DELETE: + await v_store_2.adelete_collection() + else: + await v_store_2.aclear() + + @pytest.mark.skipif( + SKIP_COLLECTION_DELETE, + reason="Collection-deletion tests are suppressed", + ) + def test_astradb_vectorstore_pre_delete_collection( + self, astradb_credentials: AstraDBCredentials + ) -> None: + """Use of the pre_delete_collection flag.""" + emb = SomeEmbeddings(dimension=2) + v_store = AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ) + v_store.clear() + try: + v_store.add_texts( + texts=["aa"], + metadatas=[ + {"k": "a", "ord": 0}, + ], + ids=["a"], + ) + res1 = v_store.similarity_search("aa", k=5) + assert len(res1) == 1 + v_store = AstraDBVectorStore( + embedding=emb, + pre_delete_collection=True, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ) + res1 = v_store.similarity_search("aa", k=5) + assert len(res1) == 0 + finally: + v_store.delete_collection() + + @pytest.mark.skipif( + SKIP_COLLECTION_DELETE, + reason="Collection-deletion tests are suppressed", + ) + async def test_astradb_vectorstore_pre_delete_collection_async( + self, astradb_credentials: AstraDBCredentials + ) -> None: + """Use of the pre_delete_collection flag.""" + emb = SomeEmbeddings(dimension=2) + # creation by passing the connection secrets + + v_store = AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ) + try: + await v_store.aadd_texts( + texts=["aa"], + metadatas=[ + {"k": "a", "ord": 0}, + ], + ids=["a"], + ) + res1 = await v_store.asimilarity_search("aa", k=5) + assert len(res1) == 1 + v_store = AstraDBVectorStore( + embedding=emb, + pre_delete_collection=True, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ) + res1 = await v_store.asimilarity_search("aa", k=5) + assert len(res1) == 0 + finally: + await v_store.adelete_collection() + + def 
test_astradb_vectorstore_from_x( + self, astradb_credentials: AstraDBCredentials + ) -> None: + """from_texts and from_documents methods.""" + emb = SomeEmbeddings(dimension=2) + # prepare empty collection + AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ).clear() + # from_texts + v_store = AstraDBVectorStore.from_texts( + texts=["Hi", "Ho"], + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ) + try: + assert v_store.similarity_search("Ho", k=1)[0].page_content == "Ho" + finally: + if not SKIP_COLLECTION_DELETE: + v_store.delete_collection() + else: + v_store.clear() + + # from_documents + v_store_2 = AstraDBVectorStore.from_documents( + [ + Document(page_content="Hee"), + Document(page_content="Hoi"), + ], + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ) + try: + assert v_store_2.similarity_search("Hoi", k=1)[0].page_content == "Hoi" + finally: + if not SKIP_COLLECTION_DELETE: + v_store_2.delete_collection() + else: + v_store_2.clear() + + async def test_astradb_vectorstore_from_x_async( + self, astradb_credentials: AstraDBCredentials + ) -> None: + """from_texts and from_documents methods.""" + emb = SomeEmbeddings(dimension=2) + # prepare empty collection + await AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ).aclear() + # from_texts + v_store = await AstraDBVectorStore.afrom_texts( + texts=["Hi", "Ho"], + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ) + try: + assert (await v_store.asimilarity_search("Ho", k=1))[0].page_content == "Ho" + finally: + if not SKIP_COLLECTION_DELETE: + await v_store.adelete_collection() + else: + await v_store.aclear() + + # from_documents + v_store_2 = await AstraDBVectorStore.afrom_documents( + [ + Document(page_content="Hee"), + Document(page_content="Hoi"), + ], + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ) + try: + assert (await v_store_2.asimilarity_search("Hoi", k=1))[ + 0 + ].page_content == "Hoi" + finally: + if not SKIP_COLLECTION_DELETE: + await v_store_2.adelete_collection() + else: + await v_store_2.aclear() + + def test_astradb_vectorstore_crud(self, store_someemb: AstraDBVectorStore) -> None: + """Basic add/delete/update behaviour.""" + res0 = store_someemb.similarity_search("Abc", k=2) + assert res0 == [] + # write and check again + store_someemb.add_texts( + texts=["aa", "bb", "cc"], + metadatas=[ + {"k": "a", "ord": 0}, + {"k": "b", "ord": 1}, + {"k": "c", "ord": 2}, + ], + ids=["a", "b", "c"], + ) + res1 = store_someemb.similarity_search("Abc", k=5) + assert {doc.page_content for doc in res1} == {"aa", "bb", "cc"} + # partial overwrite and count total entries + store_someemb.add_texts( + texts=["cc", "dd"], + metadatas=[ + {"k": "c_new", "ord": 102}, + {"k": "d_new", "ord": 103}, + ], + ids=["c", "d"], + ) + res2 = store_someemb.similarity_search("Abc", k=10) + assert len(res2) == 4 + # pick one that was just updated and check its metadata + res3 = store_someemb.similarity_search_with_score_id( + query="cc", k=1, filter={"k": "c_new"} + ) + print(str(res3)) + doc3, score3, id3 = res3[0] + assert doc3.page_content == "cc" + assert doc3.metadata == {"k": "c_new", "ord": 102} + assert score3 > 0.999 # leaving some leeway for approximations... 
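# Why the 0.999 bound above is safe, in short: SomeEmbeddings (defined at the top of
# this module) is deterministic and returns unit vectors, so identical texts embed to
# identical vectors whose cosine similarity is exactly 1, and the store's 0-to-1
# relevance score for an exact match sits at ~1.0 (cf. the similarity-scale tests
# below). A quick illustrative check; the _sketch_vec name exists only for this sketch:
_sketch_vec = SomeEmbeddings(dimension=2).embed_query("cc")
assert abs(sum(x * x for x in _sketch_vec) - 1.0) < 1e-9  # unit length
assert abs(sum(a * b for a, b in zip(_sketch_vec, _sketch_vec)) - 1.0) < 1e-9  # self-cosine is 1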
+ assert id3 == "c" + # delete and count again + del1_res = store_someemb.delete(["b"]) + assert del1_res is True + del2_res = store_someemb.delete(["a", "c", "Z!"]) + assert del2_res is True # a non-existing ID was supplied + assert len(store_someemb.similarity_search("xy", k=10)) == 1 + # clear store + store_someemb.clear() + assert store_someemb.similarity_search("Abc", k=2) == [] + # add_documents with "ids" arg passthrough + store_someemb.add_documents( + [ + Document(page_content="vv", metadata={"k": "v", "ord": 204}), + Document(page_content="ww", metadata={"k": "w", "ord": 205}), + ], + ids=["v", "w"], + ) + assert len(store_someemb.similarity_search("xy", k=10)) == 2 + res4 = store_someemb.similarity_search("ww", k=1, filter={"k": "w"}) + assert res4[0].metadata["ord"] == 205 + + async def test_astradb_vectorstore_crud_async( + self, store_someemb: AstraDBVectorStore + ) -> None: + """Basic add/delete/update behaviour.""" + res0 = await store_someemb.asimilarity_search("Abc", k=2) + assert res0 == [] + # write and check again + await store_someemb.aadd_texts( + texts=["aa", "bb", "cc"], + metadatas=[ + {"k": "a", "ord": 0}, + {"k": "b", "ord": 1}, + {"k": "c", "ord": 2}, + ], + ids=["a", "b", "c"], + ) + res1 = await store_someemb.asimilarity_search("Abc", k=5) + assert {doc.page_content for doc in res1} == {"aa", "bb", "cc"} + # partial overwrite and count total entries + await store_someemb.aadd_texts( + texts=["cc", "dd"], + metadatas=[ + {"k": "c_new", "ord": 102}, + {"k": "d_new", "ord": 103}, + ], + ids=["c", "d"], + ) + res2 = await store_someemb.asimilarity_search("Abc", k=10) + assert len(res2) == 4 + # pick one that was just updated and check its metadata + res3 = await store_someemb.asimilarity_search_with_score_id( + query="cc", k=1, filter={"k": "c_new"} + ) + print(str(res3)) + doc3, score3, id3 = res3[0] + assert doc3.page_content == "cc" + assert doc3.metadata == {"k": "c_new", "ord": 102} + assert score3 > 0.999 # leaving some leeway for approximations... + assert id3 == "c" + # delete and count again + del1_res = await store_someemb.adelete(["b"]) + assert del1_res is True + del2_res = await store_someemb.adelete(["a", "c", "Z!"]) + assert del2_res is False # a non-existing ID was supplied + assert len(await store_someemb.asimilarity_search("xy", k=10)) == 1 + # clear store + await store_someemb.aclear() + assert await store_someemb.asimilarity_search("Abc", k=2) == [] + # add_documents with "ids" arg passthrough + await store_someemb.aadd_documents( + [ + Document(page_content="vv", metadata={"k": "v", "ord": 204}), + Document(page_content="ww", metadata={"k": "w", "ord": 205}), + ], + ids=["v", "w"], + ) + assert len(await store_someemb.asimilarity_search("xy", k=10)) == 2 + res4 = await store_someemb.asimilarity_search("ww", k=1, filter={"k": "w"}) + assert res4[0].metadata["ord"] == 205 + + def test_astradb_vectorstore_mmr(self, store_parseremb: AstraDBVectorStore) -> None: + """ + MMR testing. We work on the unit circle with angle multiples + of 2*pi/20 and prepare a store with known vectors for a controlled + MMR outcome. 
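        Worked through (assuming the default MMR lambda of 0.5): the query sits at
        angle 2*pi*3/20, i.e. 54 degrees; with fetch_k=3 the candidates are the three
        closest stored angles, i in {4, 5, 0} (18, 36 and 54 degrees away). MMR first
        takes i=4 as the most similar, then prefers i=0 over i=5, because i=5 lies only
        18 degrees from the already-selected i=4 while i=0 lies 72 degrees away; hence
        the expected result set {0, 4}.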
+ """ + + def _v_from_i(i: int, N: int) -> str: + angle = 2 * math.pi * i / N + vector = [math.cos(angle), math.sin(angle)] + return json.dumps(vector) + + i_vals = [0, 4, 5, 13] + N_val = 20 + store_parseremb.add_texts( + [_v_from_i(i, N_val) for i in i_vals], metadatas=[{"i": i} for i in i_vals] + ) + res1 = store_parseremb.max_marginal_relevance_search( + _v_from_i(3, N_val), + k=2, + fetch_k=3, + ) + res_i_vals = {doc.metadata["i"] for doc in res1} + assert res_i_vals == {0, 4} + + async def test_astradb_vectorstore_mmr_async( + self, store_parseremb: AstraDBVectorStore + ) -> None: + """ + MMR testing. We work on the unit circle with angle multiples + of 2*pi/20 and prepare a store with known vectors for a controlled + MMR outcome. + """ + + def _v_from_i(i: int, N: int) -> str: + angle = 2 * math.pi * i / N + vector = [math.cos(angle), math.sin(angle)] + return json.dumps(vector) + + i_vals = [0, 4, 5, 13] + N_val = 20 + await store_parseremb.aadd_texts( + [_v_from_i(i, N_val) for i in i_vals], + metadatas=[{"i": i} for i in i_vals], + ) + res1 = await store_parseremb.amax_marginal_relevance_search( + _v_from_i(3, N_val), + k=2, + fetch_k=3, + ) + res_i_vals = {doc.metadata["i"] for doc in res1} + assert res_i_vals == {0, 4} + + def test_astradb_vectorstore_metadata( + self, store_someemb: AstraDBVectorStore + ) -> None: + """Metadata filtering.""" + store_someemb.add_documents( + [ + Document( + page_content="q", + metadata={"ord": ord("q"), "group": "consonant"}, + ), + Document( + page_content="w", + metadata={"ord": ord("w"), "group": "consonant"}, + ), + Document( + page_content="r", + metadata={"ord": ord("r"), "group": "consonant"}, + ), + Document( + page_content="e", + metadata={"ord": ord("e"), "group": "vowel"}, + ), + Document( + page_content="i", + metadata={"ord": ord("i"), "group": "vowel"}, + ), + Document( + page_content="o", + metadata={"ord": ord("o"), "group": "vowel"}, + ), + ] + ) + # no filters + res0 = store_someemb.similarity_search("x", k=10) + assert {doc.page_content for doc in res0} == set("qwreio") + # single filter + res1 = store_someemb.similarity_search( + "x", + k=10, + filter={"group": "vowel"}, + ) + assert {doc.page_content for doc in res1} == set("eio") + # multiple filters + res2 = store_someemb.similarity_search( + "x", + k=10, + filter={"group": "consonant", "ord": ord("q")}, + ) + assert {doc.page_content for doc in res2} == set("q") + # excessive filters + res3 = store_someemb.similarity_search( + "x", + k=10, + filter={"group": "consonant", "ord": ord("q"), "case": "upper"}, + ) + assert res3 == [] + # filter with logical operator + res4 = store_someemb.similarity_search( + "x", + k=10, + filter={"$or": [{"ord": ord("q")}, {"ord": ord("r")}]}, + ) + assert {doc.page_content for doc in res4} == {"q", "r"} + + def test_astradb_vectorstore_similarity_scale( + self, store_parseremb: AstraDBVectorStore + ) -> None: + """Scale of the similarity scores.""" + store_parseremb.add_texts( + texts=[ + json.dumps([1, 1]), + json.dumps([-1, -1]), + ], + ids=["near", "far"], + ) + res1 = store_parseremb.similarity_search_with_score( + json.dumps([0.5, 0.5]), + k=2, + ) + scores = [sco for _, sco in res1] + sco_near, sco_far = scores + assert abs(1 - sco_near) < MATCH_EPSILON and abs(sco_far) < MATCH_EPSILON + + async def test_astradb_vectorstore_similarity_scale_async( + self, store_parseremb: AstraDBVectorStore + ) -> None: + """Scale of the similarity scores.""" + await store_parseremb.aadd_texts( + texts=[ + json.dumps([1, 1]), + json.dumps([-1, -1]), 
+ ], + ids=["near", "far"], + ) + res1 = await store_parseremb.asimilarity_search_with_score( + json.dumps([0.5, 0.5]), + k=2, + ) + scores = [sco for _, sco in res1] + sco_near, sco_far = scores + assert abs(1 - sco_near) < MATCH_EPSILON and abs(sco_far) < MATCH_EPSILON + + def test_astradb_vectorstore_massive_delete( + self, store_someemb: AstraDBVectorStore + ) -> None: + """Larger-scale bulk deletes.""" + M = 50 + texts = [str(i + 1 / 7.0) for i in range(2 * M)] + ids0 = ["doc_%i" % i for i in range(M)] + ids1 = ["doc_%i" % (i + M) for i in range(M)] + ids = ids0 + ids1 + store_someemb.add_texts(texts=texts, ids=ids) + # deleting a bunch of these + del_res0 = store_someemb.delete(ids0) + assert del_res0 is True + # deleting the rest plus a fake one + del_res1 = store_someemb.delete(ids1 + ["ghost!"]) + assert del_res1 is True # ensure no error + # nothing left + assert store_someemb.similarity_search("x", k=2 * M) == [] + + @pytest.mark.skipif( + SKIP_COLLECTION_DELETE, + reason="Collection-deletion tests are suppressed", + ) + def test_astradb_vectorstore_delete_collection( + self, astradb_credentials: AstraDBCredentials + ) -> None: + """behaviour of 'delete_collection'.""" + collection_name = COLLECTION_NAME_DIM2 + emb = SomeEmbeddings(dimension=2) + v_store = AstraDBVectorStore( + embedding=emb, + collection_name=collection_name, + **astradb_credentials, + ) + v_store.add_texts(["huh"]) + assert len(v_store.similarity_search("hah", k=10)) == 1 + # another instance pointing to the same collection on DB + v_store_kenny = AstraDBVectorStore( + embedding=emb, + collection_name=collection_name, + **astradb_credentials, + ) + v_store_kenny.delete_collection() + # dropped on DB, but 'v_store' should have no clue: + with pytest.raises(ValueError): + _ = v_store.similarity_search("hah", k=10) + + def test_astradb_vectorstore_custom_params( + self, astradb_credentials: AstraDBCredentials + ) -> None: + """Custom batch size and concurrency params.""" + emb = SomeEmbeddings(dimension=2) + # prepare empty collection + AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ).clear() + v_store = AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + batch_size=17, + bulk_insert_batch_concurrency=13, + bulk_insert_overwrite_concurrency=7, + bulk_delete_concurrency=19, + ) + try: + # add_texts + N = 50 + texts = [str(i + 1 / 7.0) for i in range(N)] + ids = ["doc_%i" % i for i in range(N)] + v_store.add_texts(texts=texts, ids=ids) + v_store.add_texts( + texts=texts, + ids=ids, + batch_size=19, + batch_concurrency=7, + overwrite_concurrency=13, + ) + # + _ = v_store.delete(ids[: N // 2]) + _ = v_store.delete(ids[N // 2 :], concurrency=23) + # + finally: + if not SKIP_COLLECTION_DELETE: + v_store.delete_collection() + else: + v_store.clear() + + async def test_astradb_vectorstore_custom_params_async( + self, astradb_credentials: AstraDBCredentials + ) -> None: + """Custom batch size and concurrency params.""" + emb = SomeEmbeddings(dimension=2) + v_store = AstraDBVectorStore( + embedding=emb, + collection_name="lc_test_c_async", + batch_size=17, + bulk_insert_batch_concurrency=13, + bulk_insert_overwrite_concurrency=7, + bulk_delete_concurrency=19, + **astradb_credentials, + ) + try: + # add_texts + N = 50 + texts = [str(i + 1 / 7.0) for i in range(N)] + ids = ["doc_%i" % i for i in range(N)] + await v_store.aadd_texts(texts=texts, ids=ids) + await v_store.aadd_texts( + texts=texts, + ids=ids, + 
batch_size=19, + batch_concurrency=7, + overwrite_concurrency=13, + ) + # + await v_store.adelete(ids[: N // 2]) + await v_store.adelete(ids[N // 2 :], concurrency=23) + # + finally: + if not SKIP_COLLECTION_DELETE: + await v_store.adelete_collection() + else: + await v_store.aclear() + + def test_astradb_vectorstore_metrics( + self, astradb_credentials: AstraDBCredentials + ) -> None: + """ + Different choices of similarity metric. + Both stores (with "cosine" and "euclidea" metrics) contain these two: + - a vector slightly rotated w.r.t query vector + - a vector which is a long multiple of query vector + so, which one is "the closest one" depends on the metric. + """ + emb = ParserEmbeddings(dimension=2) + isq2 = 0.5**0.5 + isa = 0.7 + isb = (1.0 - isa * isa) ** 0.5 + texts = [ + json.dumps([isa, isb]), + json.dumps([10 * isq2, 10 * isq2]), + ] + ids = [ + "rotated", + "scaled", + ] + query_text = json.dumps([isq2, isq2]) + + # prepare empty collections + AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + **astradb_credentials, + ).clear() + AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2_EUCLIDEAN, + metric="euclidean", + **astradb_credentials, + ).clear() + + # creation, population, query - cosine + vstore_cos = AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2, + metric="cosine", + **astradb_credentials, + ) + try: + vstore_cos.add_texts( + texts=texts, + ids=ids, + ) + _, _, id_from_cos = vstore_cos.similarity_search_with_score_id( + query_text, + k=1, + )[0] + assert id_from_cos == "scaled" + finally: + if not SKIP_COLLECTION_DELETE: + vstore_cos.delete_collection() + else: + vstore_cos.clear() + # creation, population, query - euclidean + + vstore_euc = AstraDBVectorStore( + embedding=emb, + collection_name=COLLECTION_NAME_DIM2_EUCLIDEAN, + metric="euclidean", + **astradb_credentials, + ) + try: + vstore_euc.add_texts( + texts=texts, + ids=ids, + ) + _, _, id_from_euc = vstore_euc.similarity_search_with_score_id( + query_text, + k=1, + )[0] + assert id_from_euc == "rotated" + finally: + if not SKIP_COLLECTION_DELETE: + vstore_euc.delete_collection() + else: + vstore_euc.clear() diff --git a/libs/partners/astradb/tests/unit_tests/__init__.py b/libs/partners/astradb/tests/unit_tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/astradb/tests/unit_tests/test_imports.py b/libs/partners/astradb/tests/unit_tests/test_imports.py new file mode 100644 index 0000000000000..2240748c70c6e --- /dev/null +++ b/libs/partners/astradb/tests/unit_tests/test_imports.py @@ -0,0 +1,9 @@ +from langchain_astradb import __all__ + +EXPECTED_ALL = [ + "AstraDBVectorStore", +] + + +def test_all_imports() -> None: + assert sorted(EXPECTED_ALL) == sorted(__all__) diff --git a/libs/partners/astradb/tests/unit_tests/test_vectorstores.py b/libs/partners/astradb/tests/unit_tests/test_vectorstores.py new file mode 100644 index 0000000000000..ebfc6978d18c7 --- /dev/null +++ b/libs/partners/astradb/tests/unit_tests/test_vectorstores.py @@ -0,0 +1,45 @@ +from typing import List +from unittest.mock import Mock + +from langchain_core.embeddings import Embeddings + +from langchain_astradb.vectorstores import AstraDBVectorStore + + +class SomeEmbeddings(Embeddings): + """ + Turn a sentence into an embedding vector in some way. + Not important how. It is deterministic is all that counts. 
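    For instance, with dimension=2 the text "ab" always maps to the normalization
    of [ord("a"), ord("b")] = [97, 98], roughly [0.7035, 0.7107], and the async
    variants return the same vector.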
+ """ + + def __init__(self, dimension: int) -> None: + self.dimension = dimension + + def embed_documents(self, texts: List[str]) -> List[List[float]]: + return [self.embed_query(txt) for txt in texts] + + async def aembed_documents(self, texts: List[str]) -> List[List[float]]: + return self.embed_documents(texts) + + def embed_query(self, text: str) -> List[float]: + unnormed0 = [ord(c) for c in text[: self.dimension]] + unnormed = (unnormed0 + [1] + [0] * (self.dimension - 1 - len(unnormed0)))[ + : self.dimension + ] + norm = sum(x * x for x in unnormed) ** 0.5 + normed = [x / norm for x in unnormed] + return normed + + async def aembed_query(self, text: str) -> List[float]: + return self.embed_query(text) + + +def test_initialization() -> None: + """Test integration vectorstore initialization.""" + mock_astra_db = Mock() + embedding = SomeEmbeddings(dimension=2) + AstraDBVectorStore( + embedding=embedding, + collection_name="mock_coll_name", + astra_db_client=mock_astra_db, + ) diff --git a/libs/partners/exa/langchain_exa/retrievers.py b/libs/partners/exa/langchain_exa/retrievers.py index e316d0ab84c2c..0011ddc008e8d 100644 --- a/libs/partners/exa/langchain_exa/retrievers.py +++ b/libs/partners/exa/langchain_exa/retrievers.py @@ -4,7 +4,7 @@ from exa_py.api import HighlightsContentsOptions, TextContentsOptions # type: ignore from langchain_core.callbacks import CallbackManagerForRetrieverRun from langchain_core.documents import Document -from langchain_core.pydantic_v1 import SecretStr, root_validator +from langchain_core.pydantic_v1 import Field, SecretStr, root_validator from langchain_core.retrievers import BaseRetriever from langchain_exa._utilities import initialize_client @@ -53,8 +53,8 @@ class ExaSearchRetriever(BaseRetriever): text_contents_options: Union[TextContentsOptions, Literal[True]] = True """How to set the page content of the results""" - client: Exa - exa_api_key: SecretStr + client: Exa = Field(default=None) + exa_api_key: SecretStr = Field(default=None) exa_base_url: Optional[str] = None @root_validator(pre=True) @@ -66,11 +66,11 @@ def validate_environment(cls, values: Dict) -> Dict: def _get_relevant_documents( self, query: str, *, run_manager: CallbackManagerForRetrieverRun ) -> List[Document]: - response = self.client.search_and_contents( + response = self.client.search_and_contents( # type: ignore[misc] query, num_results=self.k, text=self.text_contents_options, - highlights=self.highlights, + highlights=self.highlights, # type: ignore include_domains=self.include_domains, exclude_domains=self.exclude_domains, start_crawl_date=self.start_crawl_date, diff --git a/libs/partners/exa/langchain_exa/tools.py b/libs/partners/exa/langchain_exa/tools.py index 20636afbfb289..8cff78fd9005b 100644 --- a/libs/partners/exa/langchain_exa/tools.py +++ b/libs/partners/exa/langchain_exa/tools.py @@ -7,7 +7,7 @@ from langchain_core.callbacks import ( CallbackManagerForToolRun, ) -from langchain_core.pydantic_v1 import SecretStr, root_validator +from langchain_core.pydantic_v1 import Field, SecretStr, root_validator from langchain_core.tools import BaseTool from langchain_exa._utilities import initialize_client @@ -22,8 +22,8 @@ class ExaSearchResults(BaseTool): "Input should be an Exa-optimized query. 
" "Output is a JSON array of the query results" ) - client: Exa - exa_api_key: SecretStr + client: Exa = Field(default=None) + exa_api_key: SecretStr = Field(default=None) @root_validator(pre=True) def validate_environment(cls, values: Dict) -> Dict: @@ -51,8 +51,8 @@ def _run( return self.client.search_and_contents( query, num_results=num_results, - text=text_contents_options, - highlights=highlights, + text=text_contents_options, # type: ignore + highlights=highlights, # type: ignore include_domains=include_domains, exclude_domains=exclude_domains, start_crawl_date=start_crawl_date, @@ -60,7 +60,7 @@ def _run( start_published_date=start_published_date, end_published_date=end_published_date, use_autoprompt=use_autoprompt, - ) + ) # type: ignore except Exception as e: return repr(e) @@ -74,8 +74,8 @@ class ExaFindSimilarResults(BaseTool): "Input should be an Exa-optimized query. " "Output is a JSON array of the query results" ) - client: Exa - exa_api_key: SecretStr + client: Exa = Field(default=None) + exa_api_key: SecretStr = Field(default=None) exa_base_url: Optional[str] = None @root_validator(pre=True) @@ -105,8 +105,8 @@ def _run( return self.client.find_similar_and_contents( url, num_results=num_results, - text=text_contents_options, - highlights=highlights, + text=text_contents_options, # type: ignore + highlights=highlights, # type: ignore include_domains=include_domains, exclude_domains=exclude_domains, start_crawl_date=start_crawl_date, @@ -115,6 +115,6 @@ def _run( end_published_date=end_published_date, exclude_source_domain=exclude_source_domain, category=category, - ) + ) # type: ignore except Exception as e: return repr(e) diff --git a/libs/partners/exa/poetry.lock b/libs/partners/exa/poetry.lock index cf05266109d79..874478edf26bf 100644 --- a/libs/partners/exa/poetry.lock +++ b/libs/partners/exa/poetry.lock @@ -38,13 +38,13 @@ trio = ["trio (>=0.23)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -176,13 +176,13 @@ files = [ [[package]] name = "exa-py" -version = "1.0.7" +version = "1.0.8" description = "Python SDK for Exa API." 
optional = false python-versions = "*" files = [ - {file = "exa_py-1.0.7-py3-none-any.whl", hash = "sha256:7d5a2a21f602a2c14dd1c919e6a79f06d4b10a5441f21be2e5bbb2d76a20596f"}, - {file = "exa_py-1.0.7.tar.gz", hash = "sha256:c2fc8861b9f3abb38917716dfbe31422eb4b9afef33aec23c279b3439974a835"}, + {file = "exa_py-1.0.8-py3-none-any.whl", hash = "sha256:1a1543f312c75eb524999495dd53a094a8aab762e7e2317d1f20a1e262892d77"}, + {file = "exa_py-1.0.8.tar.gz", hash = "sha256:cd5e0cd9463c8e6996a25cb1af25c048b3f0386ac737636f42ab28c965489fd1"}, ] [package.dependencies] @@ -265,7 +265,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.1.15" +version = "0.1.23" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -275,7 +275,7 @@ develop = true [package.dependencies] anyio = ">=3,<5" jsonpatch = "^1.33" -langsmith = ">=0.0.83,<0.1" +langsmith = "^0.0.87" packaging = "^23.2" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -291,13 +291,13 @@ url = "../../core" [[package]] name = "langsmith" -version = "0.0.83" +version = "0.0.87" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.83-py3-none-any.whl", hash = "sha256:a5bb7ac58c19a415a9d5f51db56dd32ee2cd7343a00825bbc2018312eb3d122a"}, - {file = "langsmith-0.0.83.tar.gz", hash = "sha256:94427846b334ad9bdbec3266fee12903fe9f5448f628667689d0412012aaf392"}, + {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, + {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, ] [package.dependencies] @@ -306,52 +306,49 @@ requests = ">=2,<3" [[package]] name = "mypy" -version = "0.991" +version = "1.8.0" description = "Optional static typing for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, - {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, - {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, - {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"}, - {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"}, - {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"}, - {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"}, - {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"}, - {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"}, - {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"}, - {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"}, - {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"}, - {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"}, - {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"}, - {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"}, - {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"}, - {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"}, - {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"}, - {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"}, - {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"}, - {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"}, - {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"}, - {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"}, - {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"}, - {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"}, - {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, - {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [package.dependencies] -mypy-extensions = ">=0.4.3" +mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -378,13 +375,13 @@ files = [ [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = 
"plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -393,18 +390,18 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "2.5.3" +version = "2.6.1" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, - {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.6" +pydantic-core = "2.16.2" typing-extensions = ">=4.6.1" [package.extras] @@ -412,116 +409,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.6" +version = "2.16.2" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, - {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, - 
{file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, - {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, - {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, - {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, - {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, - {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, - {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, - {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, - {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, - {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, - {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, - {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, - {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, - 
{file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, - {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + 
{file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = 
"pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = 
"pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -586,13 +557,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-watcher" -version = "0.3.4" +version = "0.3.5" description = "Automatically rerun your tests on file modifications" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "pytest_watcher-0.3.4-py3-none-any.whl", hash = 
"sha256:edd2bd9c8a1fb14d48c9f4947234065eb9b4c1acedc0bf213b1f12501dfcffd3"}, - {file = "pytest_watcher-0.3.4.tar.gz", hash = "sha256:d39491ba15b589221bb9a78ef4bed3d5d1503aed08209b1a138aeb95b9117a18"}, + {file = "pytest_watcher-0.3.5-py3-none-any.whl", hash = "sha256:af00ca52c7be22dc34c0fd3d7ffef99057207a73b05dc5161fe3b2fe91f58130"}, + {file = "pytest_watcher-0.3.5.tar.gz", hash = "sha256:8896152460ba2b1a8200c12117c6611008ec96c8b2d811f0a05ab8a82b043ff8"}, ] [package.dependencies] @@ -696,28 +667,28 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.1.14" +version = "0.1.15" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:96f76536df9b26622755c12ed8680f159817be2f725c17ed9305b472a757cdbb"}, - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab3f71f64498c7241123bb5a768544cf42821d2a537f894b22457a543d3ca7a9"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060156ecc572b8f984fd20fd8b0fcb692dd5d837b7606e968334ab7ff0090ab"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a53d8e35313d7b67eb3db15a66c08434809107659226a90dcd7acb2afa55faea"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bea9be712b8f5b4ebed40e1949379cfb2a7d907f42921cf9ab3aae07e6fba9eb"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2270504d629a0b064247983cbc495bed277f372fb9eaba41e5cf51f7ba705a6a"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80258bb3b8909b1700610dfabef7876423eed1bc930fe177c71c414921898efa"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:653230dd00aaf449eb5ff25d10a6e03bc3006813e2cb99799e568f55482e5cae"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b3acc6c4e6928459ba9eb7459dd4f0c4bf266a053c863d72a44c33246bfdbf"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6b3dadc9522d0eccc060699a9816e8127b27addbb4697fc0c08611e4e6aeb8b5"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1c8eca1a47b4150dc0fbec7fe68fc91c695aed798532a18dbb1424e61e9b721f"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:62ce2ae46303ee896fc6811f63d6dabf8d9c389da0f3e3f2bce8bc7f15ef5488"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b2027dde79d217b211d725fc833e8965dc90a16d0d3213f1298f97465956661b"}, - {file = "ruff-0.1.14-py3-none-win32.whl", hash = "sha256:722bafc299145575a63bbd6b5069cb643eaa62546a5b6398f82b3e4403329cab"}, - {file = "ruff-0.1.14-py3-none-win_amd64.whl", hash = "sha256:e3d241aa61f92b0805a7082bd89a9990826448e4d0398f0e2bc8f05c75c63d99"}, - {file = "ruff-0.1.14-py3-none-win_arm64.whl", hash = "sha256:269302b31ade4cde6cf6f9dd58ea593773a37ed3f7b97e793c8594b262466b67"}, - {file = "ruff-0.1.14.tar.gz", hash = "sha256:ad3f8088b2dfd884820289a06ab718cde7d38b94972212cc4ba90d5fbc9955f3"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = 
"sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, ] [[package]] @@ -744,17 +715,17 @@ files = [ [[package]] name = "syrupy" -version = "4.6.0" +version = "4.6.1" description = "Pytest Snapshot Test Utility" optional = false python-versions = ">=3.8.1,<4" files = [ - {file = "syrupy-4.6.0-py3-none-any.whl", hash = "sha256:747aae1bcf3cb3249e33b1e6d81097874d23615982d5686ebe637875b0775a1b"}, - {file = "syrupy-4.6.0.tar.gz", hash = "sha256:231b1f5d00f1f85048ba81676c79448076189c4aef4d33f21ae32f3b4c565a54"}, + {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, + {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, ] [package.dependencies] -pytest = ">=7.0.0,<8.0.0" +pytest = ">=7.0.0,<9.0.0" [[package]] name = "tenacity" @@ -794,54 +765,57 @@ files = [ [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file 
= "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] @@ -850,4 +824,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "672a90de20a3bab7e1d9538f8e6902bc2dac337fc991231bf951939b397b652c" +content-hash = "06818acc2d818513a1a78398d32b59cd837413cb5bd94026d63e963020f01045" diff --git a/libs/partners/exa/pyproject.toml b/libs/partners/exa/pyproject.toml index f9e762d074baf..4cc65162a4459 100644 --- a/libs/partners/exa/pyproject.toml +++ b/libs/partners/exa/pyproject.toml @@ -12,8 +12,8 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -langchain-core = ">=0.0.12" -exa-py = "^1.0.7" +langchain-core = "^0.1" +exa-py = "^1.0.8" [tool.poetry.group.test] optional = true @@ -40,7 +40,7 @@ optional = true ruff = "^0.1.5" [tool.poetry.group.typing.dependencies] -mypy = "^0.991" +mypy = "^1" langchain-core = { path = "../../core", develop = true } [tool.poetry.group.dev] diff --git a/libs/partners/google-genai/langchain_google_genai/__init__.py b/libs/partners/google-genai/langchain_google_genai/__init__.py index 505e121d1e2ac..187f7e3e036b9 100644 --- a/libs/partners/google-genai/langchain_google_genai/__init__.py +++ b/libs/partners/google-genai/langchain_google_genai/__init__.py @@ -54,6 +54,8 @@ embeddings.embed_query("hello, world!") ``` """ # noqa: E501 + +from langchain_google_genai._enums import HarmBlockThreshold, HarmCategory from langchain_google_genai.chat_models import ChatGoogleGenerativeAI from langchain_google_genai.embeddings import GoogleGenerativeAIEmbeddings from langchain_google_genai.llms import GoogleGenerativeAI @@ -62,4 +64,6 @@ "ChatGoogleGenerativeAI", 
"GoogleGenerativeAIEmbeddings", "GoogleGenerativeAI", + "HarmBlockThreshold", + "HarmCategory", ] diff --git a/libs/partners/google-genai/langchain_google_genai/_enums.py b/libs/partners/google-genai/langchain_google_genai/_enums.py new file mode 100644 index 0000000000000..b2a1de2614e99 --- /dev/null +++ b/libs/partners/google-genai/langchain_google_genai/_enums.py @@ -0,0 +1,6 @@ +from google.generativeai.types.safety_types import ( # type: ignore + HarmBlockThreshold, + HarmCategory, +) + +__all__ = ["HarmBlockThreshold", "HarmCategory"] diff --git a/libs/partners/google-genai/langchain_google_genai/chat_models.py b/libs/partners/google-genai/langchain_google_genai/chat_models.py index 420dbcfd13595..4f0e9f0a22300 100644 --- a/libs/partners/google-genai/langchain_google_genai/chat_models.py +++ b/libs/partners/google-genai/langchain_google_genai/chat_models.py @@ -517,6 +517,7 @@ def _identifying_params(self) -> Dict[str, Any]: "temperature": self.temperature, "top_k": self.top_k, "n": self.n, + "safety_settings": self.safety_settings, } def _prepare_params( @@ -549,7 +550,7 @@ def _generate( params, chat, message = self._prepare_chat( messages, stop=stop, - functions=kwargs.get("functions"), + **kwargs, ) response: genai.types.GenerateContentResponse = _chat_with_retry( content=message, @@ -568,7 +569,7 @@ async def _agenerate( params, chat, message = self._prepare_chat( messages, stop=stop, - functions=kwargs.get("functions"), + **kwargs, ) response: genai.types.GenerateContentResponse = await _achat_with_retry( content=message, @@ -587,7 +588,7 @@ def _stream( params, chat, message = self._prepare_chat( messages, stop=stop, - functions=kwargs.get("functions"), + **kwargs, ) response: genai.types.GenerateContentResponse = _chat_with_retry( content=message, @@ -598,6 +599,7 @@ def _stream( for chunk in response: _chat_result = _response_to_result(chunk, stream=True) gen = cast(ChatGenerationChunk, _chat_result.generations[0]) + if run_manager: run_manager.on_llm_new_token(gen.text) yield gen @@ -612,7 +614,7 @@ async def _astream( params, chat, message = self._prepare_chat( messages, stop=stop, - functions=kwargs.get("functions"), + **kwargs, ) async for chunk in await _achat_with_retry( content=message, @@ -622,6 +624,7 @@ async def _astream( ): _chat_result = _response_to_result(chunk, stream=True) gen = cast(ChatGenerationChunk, _chat_result.generations[0]) + if run_manager: await run_manager.on_llm_new_token(gen.text) yield gen @@ -634,9 +637,14 @@ def _prepare_chat( ) -> Tuple[Dict[str, Any], genai.ChatSession, genai.types.ContentDict]: client = self.client functions = kwargs.pop("functions", None) - if functions: - tools = convert_to_genai_function_declarations(functions) - client = genai.GenerativeModel(model_name=self.model, tools=tools) + safety_settings = kwargs.pop("safety_settings", self.safety_settings) + if functions or safety_settings: + tools = ( + convert_to_genai_function_declarations(functions) if functions else None + ) + client = genai.GenerativeModel( + model_name=self.model, tools=tools, safety_settings=safety_settings + ) params = self._prepare_params(stop, **kwargs) history = _parse_chat_history( diff --git a/libs/partners/google-genai/langchain_google_genai/llms.py b/libs/partners/google-genai/langchain_google_genai/llms.py index d6eb16d981930..9b48387314672 100644 --- a/libs/partners/google-genai/langchain_google_genai/llms.py +++ b/libs/partners/google-genai/langchain_google_genai/llms.py @@ -15,6 +15,11 @@ from langchain_core.pydantic_v1 import BaseModel, 
Field, SecretStr, root_validator from langchain_core.utils import get_from_dict_or_env +from langchain_google_genai._enums import ( + HarmBlockThreshold, + HarmCategory, +) + class GoogleModelFamily(str, Enum): GEMINI = auto() @@ -77,7 +82,10 @@ def _completion_with_retry( try: if is_gemini: return llm.client.generate_content( - contents=prompt, stream=stream, generation_config=generation_config + contents=prompt, + stream=stream, + generation_config=generation_config, + safety_settings=kwargs.pop("safety_settings", None), ) return llm.client.generate_text(prompt=prompt, **kwargs) except google.api_core.exceptions.FailedPrecondition as exc: @@ -143,6 +151,22 @@ class _BaseGoogleGenerativeAI(BaseModel): description="A string, one of: [`rest`, `grpc`, `grpc_asyncio`].", ) + safety_settings: Optional[Dict[HarmCategory, HarmBlockThreshold]] = None + """The default safety settings to use for all generations. + + For example: + + from google.generativeai.types.safety_types import HarmBlockThreshold, HarmCategory + + safety_settings = { + HarmCategory.HARM_CATEGORY_UNSPECIFIED: HarmBlockThreshold.BLOCK_NONE, + HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_MEDIUM_AND_ABOVE, + HarmCategory.HARM_CATEGORY_HATE_SPEECH: HarmBlockThreshold.BLOCK_ONLY_HIGH, + HarmCategory.HARM_CATEGORY_HARASSMENT: HarmBlockThreshold.BLOCK_LOW_AND_ABOVE, + HarmCategory.HARM_CATEGORY_SEXUALLY_EXPLICIT: HarmBlockThreshold.BLOCK_NONE, + } + """ # noqa: E501 + @property def lc_secrets(self) -> Dict[str, str]: return {"google_api_key": "GOOGLE_API_KEY"} @@ -184,6 +208,8 @@ def validate_environment(cls, values: Dict) -> Dict: ) model_name = values["model"] + safety_settings = values["safety_settings"] + if isinstance(google_api_key, SecretStr): google_api_key = google_api_key.get_secret_value() @@ -193,8 +219,15 @@ def validate_environment(cls, values: Dict) -> Dict: client_options=values.get("client_options"), ) + if safety_settings and ( + not GoogleModelFamily(model_name) == GoogleModelFamily.GEMINI + ): + raise ValueError("Safety settings are only supported for Gemini models") + if GoogleModelFamily(model_name) == GoogleModelFamily.GEMINI: - values["client"] = genai.GenerativeModel(model_name=model_name) + values["client"] = genai.GenerativeModel( + model_name=model_name, safety_settings=safety_settings + ) else: values["client"] = genai @@ -237,6 +270,7 @@ def _generate( is_gemini=True, run_manager=run_manager, generation_config=generation_config, + safety_settings=kwargs.pop("safety_settings", None), ) candidates = [ "".join([p.text for p in c.content.parts]) for c in res.candidates @@ -278,6 +312,7 @@ def _stream( is_gemini=True, run_manager=run_manager, generation_config=generation_config, + safety_settings=kwargs.pop("safety_settings", None), **kwargs, ): chunk = GenerationChunk(text=stream_resp.text) diff --git a/libs/partners/google-genai/poetry.lock b/libs/partners/google-genai/poetry.lock index 60a87e151ec4f..b4020e98505b6 100644 --- a/libs/partners/google-genai/poetry.lock +++ b/libs/partners/google-genai/poetry.lock @@ -448,7 +448,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.1.22" +version = "0.1.23" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -1146,13 +1146,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.1" +version = "4.66.2" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.1-py3-none-any.whl", hash = 
"sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, - {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, ] [package.dependencies] @@ -1177,13 +1177,13 @@ files = [ [[package]] name = "types-pillow" -version = "10.2.0.20240206" +version = "10.2.0.20240213" description = "Typing stubs for Pillow" optional = false python-versions = ">=3.8" files = [ - {file = "types-Pillow-10.2.0.20240206.tar.gz", hash = "sha256:f0de5107ff8362ffdbbd53ec896202ac905e6ab22ae784b46bcdad160ea143b9"}, - {file = "types_Pillow-10.2.0.20240206-py3-none-any.whl", hash = "sha256:abc339ae28af5916146a7729261480d68ac902cd4ff57e0bdd402eee7962644d"}, + {file = "types-Pillow-10.2.0.20240213.tar.gz", hash = "sha256:4800b61bf7eabdae2f1b17ade0d080709ed33e9f26a2e900e470e8b56ebe2387"}, + {file = "types_Pillow-10.2.0.20240213-py3-none-any.whl", hash = "sha256:062c5a0f20301a30f2df4db583f15b3c2a1283a12518d1f9d81396154e12c1af"}, ] [[package]] diff --git a/libs/partners/google-genai/pyproject.toml b/libs/partners/google-genai/pyproject.toml index deee61e7f7ed8..43248562bc849 100644 --- a/libs/partners/google-genai/pyproject.toml +++ b/libs/partners/google-genai/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain-google-genai" -version = "0.0.8" +version = "0.0.9" description = "An integration package connecting Google's genai package and LangChain" authors = [] readme = "README.md" @@ -70,9 +70,9 @@ types-google-cloud-ndb = "^2.2.0.1" [tool.ruff.lint] select = [ - "E", # pycodestyle - "F", # pyflakes - "I", # isort + "E", # pycodestyle + "F", # pyflakes + "I", # isort "T201", # print ] diff --git a/libs/partners/google-genai/tests/integration_tests/test_chat_models.py b/libs/partners/google-genai/tests/integration_tests/test_chat_models.py index 91e6aab20fbfd..26e85aa10e181 100644 --- a/libs/partners/google-genai/tests/integration_tests/test_chat_models.py +++ b/libs/partners/google-genai/tests/integration_tests/test_chat_models.py @@ -1,12 +1,16 @@ """Test ChatGoogleGenerativeAI chat model.""" +from typing import Generator + import pytest from langchain_core.messages import AIMessage, HumanMessage, SystemMessage -from langchain_google_genai.chat_models import ( +from langchain_google_genai import ( ChatGoogleGenerativeAI, - ChatGoogleGenerativeAIError, + HarmBlockThreshold, + HarmCategory, ) +from langchain_google_genai.chat_models import ChatGoogleGenerativeAIError _MODEL = "gemini-pro" # TODO: Use nano when it's available. 
_VISION_MODEL = "gemini-pro-vision" @@ -193,3 +197,32 @@ def test_generativeai_get_num_tokens_gemini() -> None: llm = ChatGoogleGenerativeAI(temperature=0, model="gemini-pro") output = llm.get_num_tokens("How are you?") assert output == 4 + + +def test_safety_settings_gemini() -> None: + safety_settings = { + HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE, + } + # test with safety filters on bind + llm = ChatGoogleGenerativeAI(temperature=0, model="gemini-pro").bind( + safety_settings=safety_settings + ) + output = llm.invoke("how to make a bomb?") + assert isinstance(output, AIMessage) + assert len(output.content) > 0 + + # test direct to stream + streamed_messages = [] + output_stream = llm.stream("how to make a bomb?", safety_settings=safety_settings) + assert isinstance(output_stream, Generator) + for message in output_stream: + streamed_messages.append(message) + assert len(streamed_messages) > 0 + + # test as init param + llm = ChatGoogleGenerativeAI( + temperature=0, model="gemini-pro", safety_settings=safety_settings + ) + out2 = llm.invoke("how to make a bomb") + assert isinstance(out2, AIMessage) + assert len(out2.content) > 0 diff --git a/libs/partners/google-genai/tests/integration_tests/test_llms.py b/libs/partners/google-genai/tests/integration_tests/test_llms.py index 9bdf49dda84cf..b761e14804132 100644 --- a/libs/partners/google-genai/tests/integration_tests/test_llms.py +++ b/libs/partners/google-genai/tests/integration_tests/test_llms.py @@ -4,10 +4,12 @@ valid API key. """ +from typing import Generator + import pytest from langchain_core.outputs import LLMResult -from langchain_google_genai.llms import GoogleGenerativeAI +from langchain_google_genai import GoogleGenerativeAI, HarmBlockThreshold, HarmCategory model_names = ["models/text-bison-001", "gemini-pro"] @@ -66,3 +68,39 @@ def test_generativeai_get_num_tokens_gemini() -> None: llm = GoogleGenerativeAI(temperature=0, model="gemini-pro") output = llm.get_num_tokens("How are you?") assert output == 4 + + +def test_safety_settings_gemini() -> None: + # test with blocked prompt + llm = GoogleGenerativeAI(temperature=0, model="gemini-pro") + output = llm.generate(prompts=["how to make a bomb?"]) + assert isinstance(output, LLMResult) + assert len(output.generations[0]) == 0 + + # safety filters + safety_settings = { + HarmCategory.HARM_CATEGORY_DANGEROUS_CONTENT: HarmBlockThreshold.BLOCK_NONE, + } + + # test with safety filters directly to generate + output = llm.generate(["how to make a bomb?"], safety_settings=safety_settings) + assert isinstance(output, LLMResult) + assert len(output.generations[0]) > 0 + + # test with safety filters directly to stream + streamed_messages = [] + output_stream = llm.stream("how to make a bomb?", safety_settings=safety_settings) + assert isinstance(output_stream, Generator) + for message in output_stream: + streamed_messages.append(message) + assert len(streamed_messages) > 0 + + # test with safety filters on instantiation + llm = GoogleGenerativeAI( + model="gemini-pro", + safety_settings=safety_settings, + temperature=0, + ) + output = llm.generate(prompts=["how to make a bomb?"]) + assert isinstance(output, LLMResult) + assert len(output.generations[0]) > 0 diff --git a/libs/partners/google-genai/tests/unit_tests/test_imports.py b/libs/partners/google-genai/tests/unit_tests/test_imports.py index e189a9fc7753e..8c90cb2bd10d6 100644 --- a/libs/partners/google-genai/tests/unit_tests/test_imports.py +++ 
b/libs/partners/google-genai/tests/unit_tests/test_imports.py @@ -4,6 +4,8 @@ "ChatGoogleGenerativeAI", "GoogleGenerativeAIEmbeddings", "GoogleGenerativeAI", + "HarmBlockThreshold", + "HarmCategory", ] diff --git a/libs/partners/google-vertexai/poetry.lock b/libs/partners/google-vertexai/poetry.lock index 7cfb61f482154..794515251cdec 100644 --- a/libs/partners/google-vertexai/poetry.lock +++ b/libs/partners/google-vertexai/poetry.lock @@ -486,13 +486,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-api-python-client" -version = "2.116.0" +version = "2.118.0" description = "Google API Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "google-api-python-client-2.116.0.tar.gz", hash = "sha256:f9f32361e16114d62929638fe07f77be30216b079ad316dc2ced859d9f72e5ad"}, - {file = "google_api_python_client-2.116.0-py2.py3-none-any.whl", hash = "sha256:846e44417c6b7385fa5f5a46cb6b9d23327754c560830245ee53a577c5e44cec"}, + {file = "google-api-python-client-2.118.0.tar.gz", hash = "sha256:ebf4927a3f5184096647be8f705d090e7f06d48ad82b0fa431a2fe80c2cbe182"}, + {file = "google_api_python_client-2.118.0-py2.py3-none-any.whl", hash = "sha256:9d83b178496b180e058fd206ebfb70ea1afab49f235dd326f557513f56f496d5"}, ] [package.dependencies] @@ -1016,7 +1016,7 @@ files = [ [[package]] name = "langchain" -version = "0.1.6" +version = "0.1.7" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -1028,7 +1028,7 @@ aiohttp = "^3.8.3" async-timeout = {version = "^4.0.0", markers = "python_version < \"3.11\""} dataclasses-json = ">= 0.5.7, < 0.7" jsonpatch = "^1.33" -langchain-community = ">=0.0.18,<0.1" +langchain-community = ">=0.0.20,<0.1" langchain-core = ">=0.1.22,<0.2" langsmith = ">=0.0.83,<0.1" numpy = "^1" @@ -1059,7 +1059,7 @@ url = "../../langchain" [[package]] name = "langchain-community" -version = "0.0.19" +version = "0.0.20" description = "Community contributed LangChain integrations." 
optional = false python-versions = ">=3.8.1,<4.0" @@ -1079,7 +1079,7 @@ tenacity = "^8.1.0" [package.extras] cli = ["typer (>=0.9.0,<0.10.0)"] -extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", "mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] +extended-testing = ["aiosqlite (>=0.19.0,<0.20.0)", "aleph-alpha-client (>=2.15.0,<3.0.0)", "anthropic (>=0.3.11,<0.4.0)", "arxiv (>=1.4,<2.0)", "assemblyai (>=0.17.0,<0.18.0)", "atlassian-python-api (>=3.36.0,<4.0.0)", "azure-ai-documentintelligence (>=1.0.0b1,<2.0.0)", "beautifulsoup4 (>=4,<5)", "bibtexparser (>=1.4.0,<2.0.0)", "cassio (>=0.1.0,<0.2.0)", "chardet (>=5.1.0,<6.0.0)", "cohere (>=4,<5)", "databricks-vectorsearch (>=0.21,<0.22)", "datasets (>=2.15.0,<3.0.0)", "dgml-utils (>=0.3.0,<0.4.0)", "elasticsearch (>=8.12.0,<9.0.0)", "esprima (>=4.0.1,<5.0.0)", "faiss-cpu (>=1,<2)", "feedparser (>=6.0.10,<7.0.0)", "fireworks-ai (>=0.9.0,<0.10.0)", "geopandas (>=0.13.1,<0.14.0)", "gitpython (>=3.1.32,<4.0.0)", "google-cloud-documentai (>=2.20.1,<3.0.0)", "gql (>=3.4.1,<4.0.0)", "gradientai (>=1.4.0,<2.0.0)", "hdbcli (>=2.19.21,<3.0.0)", "hologres-vector (>=0.0.6,<0.0.7)", "html2text (>=2020.1.16,<2021.0.0)", "httpx (>=0.24.1,<0.25.0)", "javelin-sdk (>=0.1.8,<0.2.0)", "jinja2 (>=3,<4)", "jq (>=1.4.1,<2.0.0)", "jsonschema (>1)", "lxml (>=4.9.2,<5.0.0)", "markdownify (>=0.11.6,<0.12.0)", "motor (>=3.3.1,<4.0.0)", "msal (>=1.25.0,<2.0.0)", 
"mwparserfromhell (>=0.6.4,<0.7.0)", "mwxml (>=0.3.3,<0.4.0)", "newspaper3k (>=0.2.8,<0.3.0)", "numexpr (>=2.8.6,<3.0.0)", "nvidia-riva-client (>=2.14.0,<3.0.0)", "oci (>=2.119.1,<3.0.0)", "openai (<2)", "openapi-pydantic (>=0.3.2,<0.4.0)", "oracle-ads (>=2.9.1,<3.0.0)", "pandas (>=2.0.1,<3.0.0)", "pdfminer-six (>=20221105,<20221106)", "pgvector (>=0.1.6,<0.2.0)", "praw (>=7.7.1,<8.0.0)", "psychicapi (>=0.8.0,<0.9.0)", "py-trello (>=0.19.0,<0.20.0)", "pymupdf (>=1.22.3,<2.0.0)", "pypdf (>=3.4.0,<4.0.0)", "pypdfium2 (>=4.10.0,<5.0.0)", "pyspark (>=3.4.0,<4.0.0)", "rank-bm25 (>=0.2.2,<0.3.0)", "rapidfuzz (>=3.1.1,<4.0.0)", "rapidocr-onnxruntime (>=1.3.2,<2.0.0)", "rdflib (==7.0.0)", "requests-toolbelt (>=1.0.0,<2.0.0)", "rspace_client (>=2.5.0,<3.0.0)", "scikit-learn (>=1.2.2,<2.0.0)", "sqlite-vss (>=0.1.2,<0.2.0)", "streamlit (>=1.18.0,<2.0.0)", "sympy (>=1.12,<2.0)", "telethon (>=1.28.5,<2.0.0)", "timescale-vector (>=0.0.1,<0.0.2)", "tqdm (>=4.48.0)", "tree-sitter (>=0.20.2,<0.21.0)", "tree-sitter-languages (>=1.8.0,<2.0.0)", "upstash-redis (>=0.15.0,<0.16.0)", "xata (>=1.0.0a7,<2.0.0)", "xmltodict (>=0.13.0,<0.14.0)", "zhipuai (>=1.0.7,<2.0.0)"] [package.source] type = "directory" @@ -1087,7 +1087,7 @@ url = "../../community" [[package]] name = "langchain-core" -version = "0.1.22" +version = "0.1.23" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -1802,18 +1802,18 @@ files = [ [[package]] name = "setuptools" -version = "69.0.3" +version = "69.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, + {file = "setuptools-69.1.0-py3-none-any.whl", hash = "sha256:c054629b81b946d63a9c6e732bc8b2513a7c3ea645f11d0139a2191d735c60c6"}, + {file = "setuptools-69.1.0.tar.gz", hash = "sha256:850894c4195f09c4ed30dba56213bf7c3f21d86ed6bdaafb5df5972593bfc401"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", 
"pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -1897,60 +1897,60 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.25" +version = "2.0.27" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" files = [ - {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4344d059265cc8b1b1be351bfb88749294b87a8b2bbe21dfbe066c4199541ebd"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6f9e2e59cbcc6ba1488404aad43de005d05ca56e069477b33ff74e91b6319735"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84daa0a2055df9ca0f148a64fdde12ac635e30edbca80e87df9b3aaf419e144a"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc8b7dabe8e67c4832891a5d322cec6d44ef02f432b4588390017f5cec186a84"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f5693145220517b5f42393e07a6898acdfe820e136c98663b971906120549da5"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:db854730a25db7c956423bb9fb4bdd1216c839a689bf9cc15fada0a7fb2f4570"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-win32.whl", hash = "sha256:14a6f68e8fc96e5e8f5647ef6cda6250c780612a573d99e4d881581432ef1669"}, - {file = "SQLAlchemy-2.0.25-cp310-cp310-win_amd64.whl", hash = "sha256:87f6e732bccd7dcf1741c00f1ecf33797383128bd1c90144ac8adc02cbb98643"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:342d365988ba88ada8af320d43df4e0b13a694dbd75951f537b2d5e4cb5cd002"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f37c0caf14b9e9b9e8f6dbc81bc56db06acb4363eba5a633167781a48ef036ed"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa9373708763ef46782d10e950b49d0235bfe58facebd76917d3f5cbf5971aed"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d24f571990c05f6b36a396218f251f3e0dda916e0c687ef6fdca5072743208f5"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75432b5b14dc2fff43c50435e248b45c7cdadef73388e5610852b95280ffd0e9"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:884272dcd3ad97f47702965a0e902b540541890f468d24bd1d98bcfe41c3f018"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-win32.whl", hash = "sha256:e607cdd99cbf9bb80391f54446b86e16eea6ad309361942bf88318bcd452363c"}, - {file = "SQLAlchemy-2.0.25-cp311-cp311-win_amd64.whl", hash = "sha256:7d505815ac340568fd03f719446a589162d55c52f08abd77ba8964fbb7eb5b5f"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0dacf67aee53b16f365c589ce72e766efaabd2b145f9de7c917777b575e3659d"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b801154027107461ee992ff4b5c09aa7cc6ec91ddfe50d02bca344918c3265c6"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59a21853f5daeb50412d459cfb13cb82c089ad4c04ec208cd14dddd99fc23b39"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29049e2c299b5ace92cbed0c1610a7a236f3baf4c6b66eb9547c01179f638ec5"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b64b183d610b424a160b0d4d880995e935208fc043d0302dd29fee32d1ee3f95"}, - 
{file = "SQLAlchemy-2.0.25-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f7a7d7fcc675d3d85fbf3b3828ecd5990b8d61bd6de3f1b260080b3beccf215"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-win32.whl", hash = "sha256:cf18ff7fc9941b8fc23437cc3e68ed4ebeff3599eec6ef5eebf305f3d2e9a7c2"}, - {file = "SQLAlchemy-2.0.25-cp312-cp312-win_amd64.whl", hash = "sha256:91f7d9d1c4dd1f4f6e092874c128c11165eafcf7c963128f79e28f8445de82d5"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bb209a73b8307f8fe4fe46f6ad5979649be01607f11af1eb94aa9e8a3aaf77f0"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:798f717ae7c806d67145f6ae94dc7c342d3222d3b9a311a784f371a4333212c7"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fdd402169aa00df3142149940b3bf9ce7dde075928c1886d9a1df63d4b8de62"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0d3cab3076af2e4aa5693f89622bef7fa770c6fec967143e4da7508b3dceb9b9"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:74b080c897563f81062b74e44f5a72fa44c2b373741a9ade701d5f789a10ba23"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-win32.whl", hash = "sha256:87d91043ea0dc65ee583026cb18e1b458d8ec5fc0a93637126b5fc0bc3ea68c4"}, - {file = "SQLAlchemy-2.0.25-cp37-cp37m-win_amd64.whl", hash = "sha256:75f99202324383d613ddd1f7455ac908dca9c2dd729ec8584c9541dd41822a2c"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:420362338681eec03f53467804541a854617faed7272fe71a1bfdb07336a381e"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c88f0c7dcc5f99bdb34b4fd9b69b93c89f893f454f40219fe923a3a2fd11625"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3be4987e3ee9d9a380b66393b77a4cd6d742480c951a1c56a23c335caca4ce3"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a159111a0f58fb034c93eeba211b4141137ec4b0a6e75789ab7a3ef3c7e7e3"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8b8cb63d3ea63b29074dcd29da4dc6a97ad1349151f2d2949495418fd6e48db9"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:736ea78cd06de6c21ecba7416499e7236a22374561493b456a1f7ffbe3f6cdb4"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-win32.whl", hash = "sha256:10331f129982a19df4284ceac6fe87353ca3ca6b4ca77ff7d697209ae0a5915e"}, - {file = "SQLAlchemy-2.0.25-cp38-cp38-win_amd64.whl", hash = "sha256:c55731c116806836a5d678a70c84cb13f2cedba920212ba7dcad53260997666d"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:605b6b059f4b57b277f75ace81cc5bc6335efcbcc4ccb9066695e515dbdb3900"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:665f0a3954635b5b777a55111ababf44b4fc12b1f3ba0a435b602b6387ffd7cf"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecf6d4cda1f9f6cb0b45803a01ea7f034e2f1aed9475e883410812d9f9e3cfcf"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c51db269513917394faec5e5c00d6f83829742ba62e2ac4fa5c98d58be91662f"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:790f533fa5c8901a62b6fef5811d48980adeb2f51f1290ade8b5e7ba990ba3de"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-musllinux_1_1_x86_64.whl", 
hash = "sha256:1b1180cda6df7af84fe72e4530f192231b1f29a7496951db4ff38dac1687202d"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-win32.whl", hash = "sha256:555651adbb503ac7f4cb35834c5e4ae0819aab2cd24857a123370764dc7d7e24"}, - {file = "SQLAlchemy-2.0.25-cp39-cp39-win_amd64.whl", hash = "sha256:dc55990143cbd853a5d038c05e79284baedf3e299661389654551bd02a6a68d7"}, - {file = "SQLAlchemy-2.0.25-py3-none-any.whl", hash = "sha256:a86b4240e67d4753dc3092d9511886795b3c2852abe599cffe108952f7af7ac3"}, - {file = "SQLAlchemy-2.0.25.tar.gz", hash = "sha256:a2c69a7664fb2d54b8682dd774c3b54f67f84fa123cf84dda2a5f40dcaa04e08"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d04e579e911562f1055d26dab1868d3e0bb905db3bccf664ee8ad109f035618a"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa67d821c1fd268a5a87922ef4940442513b4e6c377553506b9db3b83beebbd8"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c7a596d0be71b7baa037f4ac10d5e057d276f65a9a611c46970f012752ebf2d"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:954d9735ee9c3fa74874c830d089a815b7b48df6f6b6e357a74130e478dbd951"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5cd20f58c29bbf2680039ff9f569fa6d21453fbd2fa84dbdb4092f006424c2e6"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:03f448ffb731b48323bda68bcc93152f751436ad6037f18a42b7e16af9e91c07"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-win32.whl", hash = "sha256:d997c5938a08b5e172c30583ba6b8aad657ed9901fc24caf3a7152eeccb2f1b4"}, + {file = "SQLAlchemy-2.0.27-cp310-cp310-win_amd64.whl", hash = "sha256:eb15ef40b833f5b2f19eeae65d65e191f039e71790dd565c2af2a3783f72262f"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6c5bad7c60a392850d2f0fee8f355953abaec878c483dd7c3836e0089f046bf6"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3012ab65ea42de1be81fff5fb28d6db893ef978950afc8130ba707179b4284a"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbcd77c4d94b23e0753c5ed8deba8c69f331d4fd83f68bfc9db58bc8983f49cd"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d177b7e82f6dd5e1aebd24d9c3297c70ce09cd1d5d37b43e53f39514379c029c"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:680b9a36029b30cf063698755d277885d4a0eab70a2c7c6e71aab601323cba45"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1306102f6d9e625cebaca3d4c9c8f10588735ef877f0360b5cdb4fdfd3fd7131"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-win32.whl", hash = "sha256:5b78aa9f4f68212248aaf8943d84c0ff0f74efc65a661c2fc68b82d498311fd5"}, + {file = "SQLAlchemy-2.0.27-cp311-cp311-win_amd64.whl", hash = "sha256:15e19a84b84528f52a68143439d0c7a3a69befcd4f50b8ef9b7b69d2628ae7c4"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0de1263aac858f288a80b2071990f02082c51d88335a1db0d589237a3435fe71"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce850db091bf7d2a1f2fdb615220b968aeff3849007b1204bf6e3e50a57b3d32"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8dfc936870507da96aebb43e664ae3a71a7b96278382bcfe84d277b88e379b18"}, + {file = 
"SQLAlchemy-2.0.27-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4fbe6a766301f2e8a4519f4500fe74ef0a8509a59e07a4085458f26228cd7cc"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4535c49d961fe9a77392e3a630a626af5baa967172d42732b7a43496c8b28876"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:0fb3bffc0ced37e5aa4ac2416f56d6d858f46d4da70c09bb731a246e70bff4d5"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-win32.whl", hash = "sha256:7f470327d06400a0aa7926b375b8e8c3c31d335e0884f509fe272b3c700a7254"}, + {file = "SQLAlchemy-2.0.27-cp312-cp312-win_amd64.whl", hash = "sha256:f9374e270e2553653d710ece397df67db9d19c60d2647bcd35bfc616f1622dcd"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e97cf143d74a7a5a0f143aa34039b4fecf11343eed66538610debc438685db4a"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7b5a3e2120982b8b6bd1d5d99e3025339f7fb8b8267551c679afb39e9c7c7f1"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e36aa62b765cf9f43a003233a8c2d7ffdeb55bc62eaa0a0380475b228663a38f"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5ada0438f5b74c3952d916c199367c29ee4d6858edff18eab783b3978d0db16d"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b1d9d1bfd96eef3c3faedb73f486c89e44e64e40e5bfec304ee163de01cf996f"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-win32.whl", hash = "sha256:ca891af9f3289d24a490a5fde664ea04fe2f4984cd97e26de7442a4251bd4b7c"}, + {file = "SQLAlchemy-2.0.27-cp37-cp37m-win_amd64.whl", hash = "sha256:fd8aafda7cdff03b905d4426b714601c0978725a19efc39f5f207b86d188ba01"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ec1f5a328464daf7a1e4e385e4f5652dd9b1d12405075ccba1df842f7774b4fc"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad862295ad3f644e3c2c0d8b10a988e1600d3123ecb48702d2c0f26771f1c396"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48217be1de7d29a5600b5c513f3f7664b21d32e596d69582be0a94e36b8309cb"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e56afce6431450442f3ab5973156289bd5ec33dd618941283847c9fd5ff06bf"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:611068511b5531304137bcd7fe8117c985d1b828eb86043bd944cebb7fae3910"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b86abba762ecfeea359112b2bb4490802b340850bbee1948f785141a5e020de8"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-win32.whl", hash = "sha256:30d81cc1192dc693d49d5671cd40cdec596b885b0ce3b72f323888ab1c3863d5"}, + {file = "SQLAlchemy-2.0.27-cp38-cp38-win_amd64.whl", hash = "sha256:120af1e49d614d2525ac247f6123841589b029c318b9afbfc9e2b70e22e1827d"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d07ee7793f2aeb9b80ec8ceb96bc8cc08a2aec8a1b152da1955d64e4825fcbac"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb0845e934647232b6ff5150df37ceffd0b67b754b9fdbb095233deebcddbd4a"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc19ae2e07a067663dd24fca55f8ed06a288384f0e6e3910420bf4b1270cc51"}, + {file = 
"SQLAlchemy-2.0.27-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b90053be91973a6fb6020a6e44382c97739736a5a9d74e08cc29b196639eb979"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2f5c9dfb0b9ab5e3a8a00249534bdd838d943ec4cfb9abe176a6c33408430230"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33e8bde8fff203de50399b9039c4e14e42d4d227759155c21f8da4a47fc8053c"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-win32.whl", hash = "sha256:d873c21b356bfaf1589b89090a4011e6532582b3a8ea568a00e0c3aab09399dd"}, + {file = "SQLAlchemy-2.0.27-cp39-cp39-win_amd64.whl", hash = "sha256:ff2f1b7c963961d41403b650842dc2039175b906ab2093635d8319bef0b7d620"}, + {file = "SQLAlchemy-2.0.27-py3-none-any.whl", hash = "sha256:1ab4e0448018d01b142c916cc7119ca573803a4745cfe341b8f95657812700ac"}, + {file = "SQLAlchemy-2.0.27.tar.gz", hash = "sha256:86a6ed69a71fe6b88bf9331594fa390a2adda4a49b5c06f98e47bf0d392534f8"}, ] [package.dependencies] @@ -2258,4 +2258,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "07523b8b14607fb23cd14921e5accdc6044e012bc4129055f225f5af8be78dad" +content-hash = "61a48589be35a09abb7ba8045face101fa2147093f58d84abac39b58c055c4b4" diff --git a/libs/partners/google-vertexai/pyproject.toml b/libs/partners/google-vertexai/pyproject.toml index 55e699f9e1f20..87193351565a7 100644 --- a/libs/partners/google-vertexai/pyproject.toml +++ b/libs/partners/google-vertexai/pyproject.toml @@ -12,7 +12,7 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -langchain-core = ">=0.1.7,<0.2" +langchain-core = "^0.1.7" google-cloud-aiplatform = "^1.39.0" google-cloud-storage = "^2.14.0" diff --git a/libs/partners/ibm/.gitignore b/libs/partners/ibm/.gitignore new file mode 100644 index 0000000000000..bee8a64b79a99 --- /dev/null +++ b/libs/partners/ibm/.gitignore @@ -0,0 +1 @@ +__pycache__ diff --git a/libs/partners/ibm/LICENSE b/libs/partners/ibm/LICENSE new file mode 100644 index 0000000000000..426b65090341f --- /dev/null +++ b/libs/partners/ibm/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 LangChain, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/libs/partners/ibm/Makefile b/libs/partners/ibm/Makefile new file mode 100644 index 0000000000000..65add43791c75 --- /dev/null +++ b/libs/partners/ibm/Makefile @@ -0,0 +1,58 @@ +.PHONY: all format lint test tests integration_tests docker_tests help extended_tests + +# Default target executed when no arguments are given to make. +all: help + +# Define a variable for the test file path. +TEST_FILE ?= tests/unit_tests/ + +integration_test integration_tests: TEST_FILE=tests/integration_tests/ + +test tests integration_test integration_tests: + poetry run pytest $(TEST_FILE) + + +###################### +# LINTING AND FORMATTING +###################### + +# Define a variable for Python and notebook files. +PYTHON_FILES=. +MYPY_CACHE=.mypy_cache +lint format: PYTHON_FILES=. +lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=libs/partners/ibm --name-only --diff-filter=d master | grep -E '\.py$$|\.ipynb$$') +lint_package: PYTHON_FILES=langchain_ibm +lint_tests: PYTHON_FILES=tests +lint_tests: MYPY_CACHE=.mypy_cache_test + +lint lint_diff lint_package lint_tests: + poetry run ruff . + poetry run ruff format $(PYTHON_FILES) --diff + poetry run ruff --select I $(PYTHON_FILES) + mkdir $(MYPY_CACHE); poetry run mypy $(PYTHON_FILES) --cache-dir $(MYPY_CACHE) + +format format_diff: + poetry run ruff format $(PYTHON_FILES) + poetry run ruff --select I --fix $(PYTHON_FILES) + +spell_check: + poetry run codespell --toml pyproject.toml + +spell_fix: + poetry run codespell --toml pyproject.toml -w + +check_imports: $(shell find langchain_ibm -name '*.py') + poetry run python ./scripts/check_imports.py $^ + +###################### +# HELP +###################### + +help: + @echo '----' + @echo 'check_imports - check imports' + @echo 'format - run code formatters' + @echo 'lint - run linters' + @echo 'test - run unit tests' + @echo 'tests - run unit tests' + @echo 'test TEST_FILE= - run all tests in file' diff --git a/libs/partners/ibm/README.md b/libs/partners/ibm/README.md new file mode 100644 index 0000000000000..7c89035922543 --- /dev/null +++ b/libs/partners/ibm/README.md @@ -0,0 +1,18 @@ +# langchain-ibm + +This package contains the LangChain integrations for IBM watsonx.ai through their `ibm-watsonx-ai` SDK. + +## Installation and Setup + +- Install the LangChain partner package +```bash +pip install langchain-ibm +``` + +## LLM + +See a [usage example](https://python.langchain.com/docs/integrations/llms/watsonxllm). 
+ +```python +from langchain_ibm import WatsonxLLM +``` diff --git a/libs/partners/ibm/langchain_ibm/__init__.py b/libs/partners/ibm/langchain_ibm/__init__.py new file mode 100644 index 0000000000000..97a1ed0ce1364 --- /dev/null +++ b/libs/partners/ibm/langchain_ibm/__init__.py @@ -0,0 +1,3 @@ +from langchain_ibm.llms import WatsonxLLM + +__all__ = ["WatsonxLLM"] diff --git a/libs/partners/ibm/langchain_ibm/llms.py b/libs/partners/ibm/langchain_ibm/llms.py new file mode 100644 index 0000000000000..8afdcd17c0bf3 --- /dev/null +++ b/libs/partners/ibm/langchain_ibm/llms.py @@ -0,0 +1,387 @@ +import logging +import os +from typing import Any, Dict, Iterator, List, Mapping, Optional, Union + +from ibm_watsonx_ai.foundation_models import ModelInference # type: ignore +from langchain_core.callbacks import CallbackManagerForLLMRun +from langchain_core.language_models.llms import BaseLLM +from langchain_core.outputs import Generation, GenerationChunk, LLMResult +from langchain_core.pydantic_v1 import Extra, SecretStr, root_validator +from langchain_core.utils import convert_to_secret_str, get_from_dict_or_env + +logger = logging.getLogger(__name__) + + +class WatsonxLLM(BaseLLM): + """ + IBM watsonx.ai large language models. + + To use, you should have ``ibm_watsonx_ai`` python package installed, + and the environment variable ``WATSONX_APIKEY`` set with your API key, or pass + it as a named parameter to the constructor. + + + Example: + .. code-block:: python + + from ibm_watsonx_ai.metanames import GenTextParamsMetaNames + parameters = { + GenTextParamsMetaNames.DECODING_METHOD: "sample", + GenTextParamsMetaNames.MAX_NEW_TOKENS: 100, + GenTextParamsMetaNames.MIN_NEW_TOKENS: 1, + GenTextParamsMetaNames.TEMPERATURE: 0.5, + GenTextParamsMetaNames.TOP_K: 50, + GenTextParamsMetaNames.TOP_P: 1, + } + + from langchain_ibm import WatsonxLLM + watsonx_llm = WatsonxLLM( + model_id="google/flan-ul2", + url="https://us-south.ml.cloud.ibm.com", + apikey="*****", + project_id="*****", + params=parameters, + ) + """ + + model_id: str = "" + """Type of model to use.""" + + deployment_id: str = "" + """Type of deployed model to use.""" + + project_id: str = "" + """ID of the Watson Studio project.""" + + space_id: str = "" + """ID of the Watson Studio space.""" + + url: Optional[SecretStr] = None + """Url to Watson Machine Learning instance""" + + apikey: Optional[SecretStr] = None + """Apikey to Watson Machine Learning instance""" + + token: Optional[SecretStr] = None + """Token to Watson Machine Learning instance""" + + password: Optional[SecretStr] = None + """Password to Watson Machine Learning instance""" + + username: Optional[SecretStr] = None + """Username to Watson Machine Learning instance""" + + instance_id: Optional[SecretStr] = None + """Instance_id of Watson Machine Learning instance""" + + version: Optional[SecretStr] = None + """Version of Watson Machine Learning instance""" + + params: Optional[dict] = None + """Model parameters to use during generate requests.""" + + verify: Union[str, bool] = "" + """User can pass as verify one of following: + the path to a CA_BUNDLE file + the path of directory with certificates of trusted CAs + True - default path to truststore will be taken + False - no verification will be made""" + + streaming: bool = False + """ Whether to stream the results or not. 
""" + + watsonx_model: Any + + class Config: + """Configuration for this pydantic object.""" + + extra = Extra.forbid + + @classmethod + def is_lc_serializable(cls) -> bool: + return False + + @property + def lc_secrets(self) -> Dict[str, str]: + return { + "url": "WATSONX_URL", + "apikey": "WATSONX_APIKEY", + "token": "WATSONX_TOKEN", + "password": "WATSONX_PASSWORD", + "username": "WATSONX_USERNAME", + "instance_id": "WATSONX_INSTANCE_ID", + } + + @root_validator() + def validate_environment(cls, values: Dict) -> Dict: + """Validate that credentials and python package exists in environment.""" + values["url"] = convert_to_secret_str( + get_from_dict_or_env(values, "url", "WATSONX_URL") + ) + if "cloud.ibm.com" in values.get("url", "").get_secret_value(): + values["apikey"] = convert_to_secret_str( + get_from_dict_or_env(values, "apikey", "WATSONX_APIKEY") + ) + else: + if ( + not values["token"] + and "WATSONX_TOKEN" not in os.environ + and not values["password"] + and "WATSONX_PASSWORD" not in os.environ + and not values["apikey"] + and "WATSONX_APIKEY" not in os.environ + ): + raise ValueError( + "Did not find 'token', 'password' or 'apikey'," + " please add an environment variable" + " `WATSONX_TOKEN`, 'WATSONX_PASSWORD' or 'WATSONX_APIKEY' " + "which contains it," + " or pass 'token', 'password' or 'apikey'" + " as a named parameter." + ) + elif values["token"] or "WATSONX_TOKEN" in os.environ: + values["token"] = convert_to_secret_str( + get_from_dict_or_env(values, "token", "WATSONX_TOKEN") + ) + elif values["password"] or "WATSONX_PASSWORD" in os.environ: + values["password"] = convert_to_secret_str( + get_from_dict_or_env(values, "password", "WATSONX_PASSWORD") + ) + values["username"] = convert_to_secret_str( + get_from_dict_or_env(values, "username", "WATSONX_USERNAME") + ) + elif values["apikey"] or "WATSONX_APIKEY" in os.environ: + values["apikey"] = convert_to_secret_str( + get_from_dict_or_env(values, "apikey", "WATSONX_APIKEY") + ) + values["username"] = convert_to_secret_str( + get_from_dict_or_env(values, "username", "WATSONX_USERNAME") + ) + if not values["instance_id"] or "WATSONX_INSTANCE_ID" not in os.environ: + values["instance_id"] = convert_to_secret_str( + get_from_dict_or_env(values, "instance_id", "WATSONX_INSTANCE_ID") + ) + + credentials = { + "url": values["url"].get_secret_value() if values["url"] else None, + "apikey": values["apikey"].get_secret_value() if values["apikey"] else None, + "token": values["token"].get_secret_value() if values["token"] else None, + "password": values["password"].get_secret_value() + if values["password"] + else None, + "username": values["username"].get_secret_value() + if values["username"] + else None, + "instance_id": values["instance_id"].get_secret_value() + if values["instance_id"] + else None, + "version": values["version"].get_secret_value() + if values["version"] + else None, + } + credentials_without_none_value = { + key: value for key, value in credentials.items() if value is not None + } + + watsonx_model = ModelInference( + model_id=values["model_id"], + deployment_id=values["deployment_id"], + credentials=credentials_without_none_value, + params=values["params"], + project_id=values["project_id"], + space_id=values["space_id"], + verify=values["verify"], + ) + values["watsonx_model"] = watsonx_model + + return values + + @property + def _identifying_params(self) -> Mapping[str, Any]: + """Get the identifying parameters.""" + return { + "model_id": self.model_id, + "deployment_id": self.deployment_id, + "params": 
self.params, + "project_id": self.project_id, + "space_id": self.space_id, + } + + @property + def _llm_type(self) -> str: + """Return type of llm.""" + return "IBM watsonx.ai" + + @staticmethod + def _extract_token_usage( + response: Optional[List[Dict[str, Any]]] = None, + ) -> Dict[str, Any]: + if response is None: + return {"generated_token_count": 0, "input_token_count": 0} + + input_token_count = 0 + generated_token_count = 0 + + def get_count_value(key: str, result: Dict[str, Any]) -> int: + return result.get(key, 0) or 0 + + for res in response: + results = res.get("results") + if results: + input_token_count += get_count_value("input_token_count", results[0]) + generated_token_count += get_count_value( + "generated_token_count", results[0] + ) + + return { + "generated_token_count": generated_token_count, + "input_token_count": input_token_count, + } + + def _get_chat_params( + self, stop: Optional[List[str]] = None + ) -> Optional[Dict[str, Any]]: + params: Optional[Dict[str, Any]] = {**self.params} if self.params else None + if stop is not None: + params = (params or {}) | {"stop_sequences": stop} + return params + + def _create_llm_result(self, response: List[dict]) -> LLMResult: + """Create the LLMResult from the choices and prompts.""" + generations = [] + for res in response: + results = res.get("results") + if results: + finish_reason = results[0].get("stop_reason") + gen = Generation( + text=results[0].get("generated_text"), + generation_info={"finish_reason": finish_reason}, + ) + generations.append([gen]) + final_token_usage = self._extract_token_usage(response) + llm_output = { + "token_usage": final_token_usage, + "model_id": self.model_id, + "deployment_id": self.deployment_id, + } + return LLMResult(generations=generations, llm_output=llm_output) + + def _stream_response_to_generation_chunk( + self, + stream_response: Dict[str, Any], + ) -> GenerationChunk: + """Convert a stream response to a generation chunk.""" + if not stream_response["results"]: + return GenerationChunk(text="") + return GenerationChunk( + text=stream_response["results"][0]["generated_text"], + generation_info=dict( + finish_reason=stream_response["results"][0].get("stop_reason", None), + llm_output={ + "model_id": self.model_id, + "deployment_id": self.deployment_id, + }, + ), + ) + + def _call( + self, + prompt: str, + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> str: + """Call the IBM watsonx.ai inference endpoint. + Args: + prompt: The prompt to pass into the model. + stop: Optional list of stop words to use when generating. + run_manager: Optional callback manager. + Returns: + The string generated by the model. + Example: + .. code-block:: python + + response = watsonx_llm("What is a molecule") + """ + result = self._generate( + prompts=[prompt], stop=stop, run_manager=run_manager, **kwargs + ) + return result.generations[0][0].text + + def _generate( + self, + prompts: List[str], + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + stream: Optional[bool] = None, + **kwargs: Any, + ) -> LLMResult: + """Call the IBM watsonx.ai inference endpoint which then generate the response. + Args: + prompts: List of strings (prompts) to pass into the model. + stop: Optional list of stop words to use when generating. + run_manager: Optional callback manager. + Returns: + The full LLMResult output. + Example: + .. 
code-block:: python + + response = watsonx_llm.generate(["What is a molecule"]) + """ + params = self._get_chat_params(stop=stop) + should_stream = stream if stream is not None else self.streaming + if should_stream: + if len(prompts) > 1: + raise ValueError( + f"WatsonxLLM currently only supports single prompt, got {prompts}" + ) + generation = GenerationChunk(text="") + stream_iter = self._stream( + prompts[0], stop=stop, run_manager=run_manager, **kwargs + ) + for chunk in stream_iter: + if generation is None: + generation = chunk + else: + generation += chunk + assert generation is not None + if isinstance(generation.generation_info, dict): + llm_output = generation.generation_info.pop("llm_output") + return LLMResult(generations=[[generation]], llm_output=llm_output) + return LLMResult(generations=[[generation]]) + else: + response = self.watsonx_model.generate(prompt=prompts, params=params) + return self._create_llm_result(response) + + def _stream( + self, + prompt: str, + stop: Optional[List[str]] = None, + run_manager: Optional[CallbackManagerForLLMRun] = None, + **kwargs: Any, + ) -> Iterator[GenerationChunk]: + """Call the IBM watsonx.ai inference endpoint which then streams the response. + Args: + prompt: The prompt to pass into the model. + stop: Optional list of stop words to use when generating. + run_manager: Optional callback manager. + Returns: + The iterator which yields generation chunks. + Example: + .. code-block:: python + + response = watsonx_llm.stream("What is a molecule") + for chunk in response: + print(chunk, end='') + """ + params = self._get_chat_params(stop=stop) + for stream_resp in self.watsonx_model.generate_text_stream( + prompt=prompt, raw_response=True, params=params + ): + if not isinstance(stream_resp, dict): + stream_resp = stream_resp.dict() + chunk = self._stream_response_to_generation_chunk(stream_resp) + + if run_manager: + run_manager.on_llm_new_token(chunk.text, chunk=chunk) + yield chunk diff --git a/libs/partners/ibm/langchain_ibm/py.typed b/libs/partners/ibm/langchain_ibm/py.typed new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/ibm/poetry.lock b/libs/partners/ibm/poetry.lock new file mode 100644 index 0000000000000..b4f2e062d097f --- /dev/null +++ b/libs/partners/ibm/poetry.lock @@ -0,0 +1,1095 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
+ +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "anyio" +version = "4.2.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "codespell" +version = "2.2.6" +description = "Codespell" +optional = false +python-versions = ">=3.8" +files = [ + {file = "codespell-2.2.6-py3-none-any.whl", hash = "sha256:9ee9a3e5df0990604013ac2a9f22fa8e57669c827124a2e961fe8a1da4cacc07"}, + {file = "codespell-2.2.6.tar.gz", hash = "sha256:a8c65d8eb3faa03deabab6b3bbe798bea72e1799c7e9e955d57eca4096abcff9"}, +] + +[package.extras] +dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] +hard-encoding-detection = ["chardet"] +toml = ["tomli"] +types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "freezegun" +version = "1.4.0" +description = "Let your Python tests travel through time" +optional = false +python-versions = ">=3.7" +files = [ + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, +] + +[package.dependencies] +python-dateutil = ">=2.7" + +[[package]] +name = "ibm-cos-sdk" +version = "2.13.4" +description = "IBM SDK for Python" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "ibm-cos-sdk-2.13.4.tar.gz", hash = "sha256:ee06bb89205e2bd031967e7a0d3fc47b39363be03badc49442202394a791d24a"}, +] + +[package.dependencies] +ibm-cos-sdk-core = "2.13.4" +ibm-cos-sdk-s3transfer = "2.13.4" +jmespath = ">=0.10.0,<=1.0.1" + +[[package]] +name = "ibm-cos-sdk-core" +version = "2.13.4" +description = "Low-level, data-driven core of IBM SDK for Python" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "ibm-cos-sdk-core-2.13.4.tar.gz", hash = 
"sha256:c0f3c03b6c21bb69d3dedd2a1bb647621e0d99e0e1c0929d2c36bd45cfd4166c"}, +] + +[package.dependencies] +jmespath = ">=0.10.0,<=1.0.1" +python-dateutil = ">=2.8.2,<3.0.0" +requests = ">=2.31.0,<3.0" +urllib3 = {version = ">=1.26.18,<2.2", markers = "python_version >= \"3.10\""} + +[[package]] +name = "ibm-cos-sdk-s3transfer" +version = "2.13.4" +description = "IBM S3 Transfer Manager" +optional = false +python-versions = ">= 3.6" +files = [ + {file = "ibm-cos-sdk-s3transfer-2.13.4.tar.gz", hash = "sha256:3c93feaf66254803b2b8523720efaf90e7374b544ac0a411099617f3b4689279"}, +] + +[package.dependencies] +ibm-cos-sdk-core = "2.13.4" + +[[package]] +name = "ibm-watson-machine-learning" +version = "1.0.347" +description = "IBM Watson Machine Learning API Client" +optional = false +python-versions = ">=3.10" +files = [ + {file = "ibm_watson_machine_learning-1.0.347-py3-none-any.whl", hash = "sha256:5ac0b6ec9a08adaf1c2a2b414bbf0aed8d8f3d77adef29bda725c8483580169b"}, + {file = "ibm_watson_machine_learning-1.0.347.tar.gz", hash = "sha256:62ef043ddf6c093b2d7b1f6fe68ebbe4e79db5a4d9852d651a1192ad2e2ad723"}, +] + +[package.dependencies] +certifi = "*" +ibm-cos-sdk = {version = ">=2.12.0,<2.14.0", markers = "python_version >= \"3.10\""} +importlib-metadata = "*" +lomond = "*" +packaging = "*" +pandas = ">=0.24.2,<1.6.0" +requests = "*" +tabulate = "*" +urllib3 = "*" + +[package.extras] +fl = ["cloudpickle (==1.3.0)", "ddsketch (==1.1.2)", "diffprivlib (==0.5.1)", "environs (==9.5.0)", "gym", "image (==1.5.33)", "jsonpickle (==1.4.2)", "lz4", "numcompress (==0.1.2)", "numpy (==1.19.2)", "pandas (==1.3.4)", "parse (==1.19.0)", "pathlib2 (==2.3.6)", "psutil", "pyYAML (==5.4.1)", "pytest (==6.2.5)", "requests (==2.27.1)", "scikit-learn (==0.23.2)", "scipy (==1.6.3)", "setproctitle", "skorch (==0.11.0)", "tabulate (==0.8.9)", "tensorflow (==2.4.4)", "torch (==1.7.1)", "websockets (==8.1)"] +fl-crypto = ["pyhelayers (==1.5.0.3)"] +fl-rt22-1 = ["GPUtil", "cloudpickle (==1.3.0)", "ddsketch (==1.1.2)", "diffprivlib (==0.5.1)", "environs (==9.5.0)", "gym", "image (==1.5.33)", "joblib (==1.1.1)", "jsonpickle (==1.4.2)", "lz4", "numcompress (==0.1.2)", "numpy (==1.20.3)", "pandas (==1.3.4)", "parse (==1.19.0)", "pathlib2 (==2.3.6)", "protobuf (==3.19.5)", "psutil", "pyYAML (==5.4.1)", "pytest (==6.2.5)", "requests (==2.27.1)", "scikit-learn (==1.0.2)", "scipy (==1.7.3)", "setproctitle", "skorch (==0.11.0)", "tabulate (==0.8.9)", "tensorflow (==2.7.2)", "torch (==1.10.2)", "websockets (==10.1)"] +fl-rt22-2-py3-10 = ["GPUtil", "cloudpickle (==1.3.0)", "cryptography (==39.0.1)", "ddsketch (==1.1.2)", "diffprivlib (==0.5.1)", "environs (==9.5.0)", "gym", "image (==1.5.33)", "joblib (==1.1.1)", "jsonpickle (==1.4.2)", "lz4", "numcompress (==0.1.2)", "numpy (==1.23.1)", "pandas (==1.4.3)", "parse (==1.19.0)", "pathlib2 (==2.3.6)", "protobuf (==3.19.5)", "psutil", "pyYAML (==6.0.1)", "pytest (==6.2.5)", "requests (==2.27.1)", "scikit-learn (==1.1.1)", "scipy (==1.8.1)", "setproctitle", "skorch (==0.12.0)", "tabulate (==0.8.9)", "tensorflow (==2.9.3)", "torch (==1.12.1)", "websockets (==10.1)"] +fl-rt23-1-py3-10 = ["GPUtil", "cloudpickle (==1.3.0)", "cryptography (==39.0.1)", "ddsketch (==1.1.2)", "diffprivlib (==0.5.1)", "environs (==9.5.0)", "gym", "image (==1.5.33)", "joblib (==1.1.1)", "jsonpickle (==1.4.2)", "lz4", "numcompress (==0.1.2)", "numpy (==1.23.5)", "pandas (==1.5.3)", "parse (==1.19.0)", "pathlib2 (==2.3.6)", "protobuf (==4.22.1)", "psutil", "pyYAML (==6.0.1)", "pytest (==6.2.5)", "requests (==2.27.1)", 
"scikit-learn (==1.1.1)", "scipy (==1.10.1)", "setproctitle", "skorch (==0.12.0)", "tabulate (==0.8.9)", "tensorflow (==2.12.0)", "torch (==2.0.0)", "websockets (==10.1)"] + +[[package]] +name = "ibm-watsonx-ai" +version = "0.1.8" +description = "IBM watsonx.ai API Client" +optional = false +python-versions = ">=3.10" +files = [ + {file = "ibm_watsonx_ai-0.1.8-py3-none-any.whl", hash = "sha256:85536b00aa3c495540480e53a17b56a0990d1340e47fae0e7ea778dcd717e5dc"}, + {file = "ibm_watsonx_ai-0.1.8.tar.gz", hash = "sha256:ba4e60091165cb755985f85ef0ece1db76ad1d351dd515a55d739467196dace3"}, +] + +[package.dependencies] +ibm-watson-machine-learning = ">=1.0.335" + +[package.extras] +fl = ["cloudpickle (==1.3.0)", "ddsketch (==1.1.2)", "diffprivlib (==0.5.1)", "environs (==9.5.0)", "gym", "image (==1.5.33)", "jsonpickle (==1.4.2)", "lz4", "numcompress (==0.1.2)", "numpy (==1.19.2)", "pandas (==1.3.4)", "parse (==1.19.0)", "pathlib2 (==2.3.6)", "psutil", "pyYAML (==5.4.1)", "pytest (==6.2.5)", "requests (==2.27.1)", "scikit-learn (==0.23.2)", "scipy (==1.6.3)", "setproctitle", "skorch (==0.11.0)", "tabulate (==0.8.9)", "tensorflow (==2.4.4)", "torch (==1.7.1)", "websockets (==8.1)"] +fl-crypto = ["pyhelayers (==1.5.0.3)"] +fl-rt22-1 = ["GPUtil", "cloudpickle (==1.3.0)", "ddsketch (==1.1.2)", "diffprivlib (==0.5.1)", "environs (==9.5.0)", "gym", "image (==1.5.33)", "joblib (==1.1.1)", "jsonpickle (==1.4.2)", "lz4", "numcompress (==0.1.2)", "numpy (==1.20.3)", "pandas (==1.3.4)", "parse (==1.19.0)", "pathlib2 (==2.3.6)", "protobuf (==3.19.5)", "psutil", "pyYAML (==5.4.1)", "pytest (==6.2.5)", "requests (==2.27.1)", "scikit-learn (==1.0.2)", "scipy (==1.7.3)", "setproctitle", "skorch (==0.11.0)", "tabulate (==0.8.9)", "tensorflow (==2.7.2)", "torch (==1.10.2)", "websockets (==10.1)"] +fl-rt22-2-py3-10 = ["GPUtil", "cloudpickle (==1.3.0)", "cryptography (==39.0.1)", "ddsketch (==1.1.2)", "diffprivlib (==0.5.1)", "environs (==9.5.0)", "gym", "image (==1.5.33)", "joblib (==1.1.1)", "jsonpickle (==1.4.2)", "lz4", "numcompress (==0.1.2)", "numpy (==1.23.1)", "pandas (==1.4.3)", "parse (==1.19.0)", "pathlib2 (==2.3.6)", "protobuf (==3.19.5)", "psutil", "pyYAML (==6.0.1)", "pytest (==6.2.5)", "requests (==2.27.1)", "scikit-learn (==1.1.1)", "scipy (==1.8.1)", "setproctitle", "skorch (==0.12.0)", "tabulate (==0.8.9)", "tensorflow (==2.9.3)", "torch (==1.12.1)", "websockets (==10.1)"] +fl-rt23-1-py3-10 = ["GPUtil", "cloudpickle (==1.3.0)", "cryptography (==39.0.1)", "ddsketch (==1.1.2)", "diffprivlib (==0.5.1)", "environs (==9.5.0)", "gym", "image (==1.5.33)", "joblib (==1.1.1)", "jsonpickle (==1.4.2)", "lz4", "numcompress (==0.1.2)", "numpy (==1.23.5)", "pandas (==1.5.3)", "parse (==1.19.0)", "pathlib2 (==2.3.6)", "protobuf (==4.22.1)", "psutil", "pyYAML (==6.0.1)", "pytest (==6.2.5)", "requests (==2.27.1)", "scikit-learn (==1.1.1)", "scipy (==1.10.1)", "setproctitle", "skorch (==0.12.0)", "tabulate (==0.8.9)", "tensorflow (==2.12.0)", "torch (==2.0.0)", "websockets (==10.1)"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "importlib-metadata" +version = "7.0.1" +description = "Read metadata from Python packages" +optional = false 
+python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, + {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "langchain-core" +version = "0.1.23" +description = "Building applications with LLMs through composability" +optional = false +python-versions = ">=3.8.1,<4.0" +files = [] +develop = true + +[package.dependencies] +anyio = ">=3,<5" +jsonpatch = "^1.33" +langsmith = "^0.1.0" +packaging = "^23.2" +pydantic = ">=1,<3" +PyYAML = ">=5.3" +requests = "^2" +tenacity = "^8.1.0" + +[package.extras] +extended-testing = ["jinja2 (>=3,<4)"] + +[package.source] +type = "directory" +url = "../../core" + +[[package]] +name = "langsmith" +version = "0.1.1" +description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
+optional = false +python-versions = ">=3.8.1,<4.0" +files = [ + {file = "langsmith-0.1.1-py3-none-any.whl", hash = "sha256:10ff2b977a41e3f6351d1a4239d9bd57af0547aa909e839d2791e16cc197a6f9"}, + {file = "langsmith-0.1.1.tar.gz", hash = "sha256:09df0c2ca9085105f97a4e4f281b083e312c99d162f3fe2b2d5eefd5c3692e60"}, +] + +[package.dependencies] +pydantic = ">=1,<3" +requests = ">=2,<3" + +[[package]] +name = "lomond" +version = "0.3.3" +description = "Websocket Client Library" +optional = false +python-versions = "*" +files = [ + {file = "lomond-0.3.3-py2.py3-none-any.whl", hash = "sha256:df1dd4dd7b802a12b71907ab1abb08b8ce9950195311207579379eb3b1553de7"}, + {file = "lomond-0.3.3.tar.gz", hash = "sha256:427936596b144b4ec387ead99aac1560b77c8a78107d3d49415d3abbe79acbd3"}, +] + +[package.dependencies] +six = ">=1.10.0" + +[[package]] +name = "mypy" +version = "0.991" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"}, + {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"}, + {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"}, + {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"}, + {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"}, + {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"}, + {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"}, + {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"}, + {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"}, + {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"}, + {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"}, + {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"}, + {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"}, + {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"}, + {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"}, + {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"}, + {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"}, + {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"}, + {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"}, + {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"}, + {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"}, + {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"}, + {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"}, + {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"}, + {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"}, + {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"}, + {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"}, + {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"}, + {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"}, + {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"}, +] + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash 
= "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pandas" +version = "1.5.3" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3749077d86e3a2f0ed51367f30bf5b82e131cc0f14260c4d3e499186fccc4406"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:972d8a45395f2a2d26733eb8d0f629b2f90bebe8e8eddbb8829b180c09639572"}, + {file = "pandas-1.5.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50869a35cbb0f2e0cd5ec04b191e7b12ed688874bd05dd777c19b28cbea90996"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ac844a0fe00bfaeb2c9b51ab1424e5c8744f89860b138434a363b1f620f354"}, + {file = "pandas-1.5.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7a0a56cef15fd1586726dace5616db75ebcfec9179a3a55e78f72c5639fa2a23"}, + {file = "pandas-1.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:478ff646ca42b20376e4ed3fa2e8d7341e8a63105586efe54fa2508ee087f328"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6973549c01ca91ec96199e940495219c887ea815b2083722821f1d7abfa2b4dc"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c39a8da13cede5adcd3be1182883aea1c925476f4e84b2807a46e2775306305d"}, + {file = "pandas-1.5.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f76d097d12c82a535fda9dfe5e8dd4127952b45fea9b0276cb30cca5ea313fbc"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e474390e60ed609cec869b0da796ad94f420bb057d86784191eefc62b65819ae"}, + {file = "pandas-1.5.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f2b952406a1588ad4cad5b3f55f520e82e902388a6d5a4a91baa8d38d23c7f6"}, + {file = "pandas-1.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc4c368f42b551bf72fac35c5128963a171b40dce866fb066540eeaf46faa003"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14e45300521902689a81f3f41386dc86f19b8ba8dd5ac5a3c7010ef8d2932813"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9842b6f4b8479e41968eced654487258ed81df7d1c9b7b870ceea24ed9459b31"}, + {file = "pandas-1.5.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26d9c71772c7afb9d5046e6e9cf42d83dd147b5cf5bcb9d97252077118543792"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fbcb19d6fceb9e946b3e23258757c7b225ba450990d9ed63ccceeb8cae609f7"}, + {file = "pandas-1.5.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:565fa34a5434d38e9d250af3c12ff931abaf88050551d9fbcdfafca50d62babf"}, + {file = "pandas-1.5.3-cp38-cp38-win32.whl", hash = "sha256:87bd9c03da1ac870a6d2c8902a0e1fd4267ca00f13bc494c9e5a9020920e1d51"}, + {file = "pandas-1.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:41179ce559943d83a9b4bbacb736b04c928b095b5f25dd2b7389eda08f46f373"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c74a62747864ed568f5a82a49a23a8d7fe171d0c69038b38cedf0976831296fa"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c4c00e0b0597c8e4f59e8d461f797e5d70b4d025880516a8261b2817c47759ee"}, + {file = "pandas-1.5.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a50d9a4336a9621cab7b8eb3fb11adb82de58f9b91d84c2cd526576b881a0c5a"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd05f7783b3274aa206a1af06f0ceed3f9b412cf665b7247eacd83be41cf7bf0"}, + {file = "pandas-1.5.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f69c4029613de47816b1bb30ff5ac778686688751a5e9c99ad8c7031f6508e5"}, + {file = "pandas-1.5.3-cp39-cp39-win32.whl", hash = "sha256:7cec0bee9f294e5de5bbfc14d0573f65526071029d036b753ee6507d2a21480a"}, + {file = "pandas-1.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfd681c5dc216037e0b0a2c821f5ed99ba9f03ebcf119c7dac0e9a7b960b9ec9"}, + {file = "pandas-1.5.3.tar.gz", hash = "sha256:74a3fd7e5a7ec052f183273dc7b0acd3a863edf7520f5d3a1765c04ffdb3b0b1"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, + {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, +] +python-dateutil = ">=2.8.1" +pytz = ">=2020.1" + 
+[package.extras] +test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] + +[[package]] +name = "pluggy" +version = "1.4.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pydantic" +version = "2.6.1" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.16.2" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.16.2" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = 
"pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = 
"pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, +] + +[package.dependencies] 
+typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.21.1" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, + {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy (>=0.931)", "pytest-trio (>=0.7.0)"] + +[[package]] +name = "pytest-mock" +version = "3.12.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.12.0.tar.gz", hash = "sha256:31a40f038c22cad32287bb43932054451ff5583ff094bca6f675df2f8bc1a6e9"}, + {file = "pytest_mock-3.12.0-py3-none-any.whl", hash = "sha256:0972719a7263072da3a21c7f4773069bcc7486027d7e8e1f81d98a47e701bc4f"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "pytest-watcher" +version = "0.3.5" +description = "Automatically rerun your tests on file modifications" +optional = false +python-versions = ">=3.7.0,<4.0.0" +files = [ + {file = "pytest_watcher-0.3.5-py3-none-any.whl", hash = "sha256:af00ca52c7be22dc34c0fd3d7ffef99057207a73b05dc5161fe3b2fe91f58130"}, + {file = "pytest_watcher-0.3.5.tar.gz", hash = "sha256:8896152460ba2b1a8200c12117c6611008ec96c8b2d811f0a05ab8a82b043ff8"}, +] + +[package.dependencies] +tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""} +watchdog = ">=2.0.0" + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + 
{file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + 
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "ruff" +version = "0.1.15" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = 
"ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, +] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "syrupy" +version = "4.6.1" +description = "Pytest Snapshot Test Utility" +optional = false +python-versions = ">=3.8.1,<4" +files = [ + {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, + {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9.0.0" + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "tenacity" +version = "8.2.3" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tenacity-8.2.3-py3-none-any.whl", hash = "sha256:ce510e327a630c9e1beaf17d42e6ffacc88185044ad85cf74c0a8887c6a0f88c"}, + {file = "tenacity-8.2.3.tar.gz", hash = "sha256:5398ef0d78e63f40007c1fb4c0bff96e1911394d2fa8d194f77619c05ff6cc8a"}, +] + +[package.extras] +doc = ["reno", "sphinx", "tornado (>=4.5)"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "types-requests" +version = "2.31.0.20240125" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.31.0.20240125.tar.gz", hash = "sha256:03a28ce1d7cd54199148e043b2079cdded22d6795d19a2c2a6791a4b2b5e2eb5"}, + {file = "types_requests-2.31.0.20240125-py3-none-any.whl", hash = 
"sha256:9592a9a4cb92d6d75d9b491a41477272b710e021011a2a3061157e2fb1f1a5d1"}, +] + +[package.dependencies] +urllib3 = ">=2" + +[[package]] +name = "typing-extensions" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, +] + +[[package]] +name = "urllib3" +version = "2.1.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, + {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "watchdog" +version = "4.0.0" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.8" +files = [ + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = 
"watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "zipp" +version = "3.17.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, + {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.10,<4.0" +content-hash = "1ea206277880af7ba191e1fe7f60e3a51274623c1697af1a689ed8ce35c0feb6" diff --git a/libs/partners/ibm/pyproject.toml b/libs/partners/ibm/pyproject.toml new file mode 100644 index 0000000000000..adcab41ef7fb3 --- /dev/null +++ b/libs/partners/ibm/pyproject.toml @@ -0,0 +1,96 @@ +[tool.poetry] +name = "langchain-ibm" +version = "0.0.1" +description = "An integration package connecting IBM watsonx.ai and LangChain" +authors = ["IBM"] +readme = "README.md" +repository = "https://github.com/langchain-ai/langchain" +license = "MIT" + +[tool.poetry.urls] +"Source Code" = 
"https://github.com/langchain-ai/langchain/tree/master/libs/partners/ibm" + +[tool.poetry.dependencies] +python = ">=3.10,<4.0" +langchain-core = ">=0.1.22,<0.2" +ibm-watsonx-ai = "^0.1.4" + +[tool.poetry.group.test] +optional = true + +[tool.poetry.group.test.dependencies] +pytest = "^7.3.0" +freezegun = "^1.2.2" +pytest-mock = "^3.10.0" +syrupy = "^4.0.2" +pytest-watcher = "^0.3.4" +pytest-asyncio = "^0.21.1" +langchain-core = {path = "../../core", develop = true} + +[tool.poetry.group.codespell] +optional = true + +[tool.poetry.group.codespell.dependencies] +codespell = "^2.2.0" + +[tool.poetry.group.test_integration] +optional = true + +[tool.poetry.group.test_integration.dependencies] + +[tool.poetry.group.lint] +optional = true + +[tool.poetry.group.lint.dependencies] +ruff = "^0.1.5" + +[tool.poetry.group.typing.dependencies] +mypy = "^0.991" +langchain-core = {path = "../../core", develop = true} +types-requests = "^2" + +[tool.poetry.group.dev] +optional = true + +[tool.poetry.group.dev.dependencies] +langchain-core = {path = "../../core", develop = true} + +[tool.ruff] +select = [ + "E", # pycodestyle + "F", # pyflakes + "I", # isort +] + +[tool.mypy] +disallow_untyped_defs = "True" + +[tool.coverage.run] +omit = [ + "tests/*", +] + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.pytest.ini_options] +# --strict-markers will raise errors on unknown marks. +# https://docs.pytest.org/en/7.1.x/how-to/mark.html#raising-errors-on-unknown-marks +# +# https://docs.pytest.org/en/7.1.x/reference/reference.html +# --strict-config any warnings encountered while parsing the `pytest` +# section of the configuration file raise errors. +# +# https://github.com/tophat/syrupy +# --snapshot-warn-unused Prints a warning on unused snapshots rather than fail the test suite. +addopts = "--snapshot-warn-unused --strict-markers --strict-config --durations=5" +# Registering custom markers. +# https://docs.pytest.org/en/7.1.x/example/markers.html#registering-markers +markers = [ + "requires: mark tests as requiring a specific library", + "asyncio: mark tests as requiring asyncio", + "compile: mark placeholder test used to compile integration tests without running them", + "scheduled: mark tests to run in scheduled testing", +] +asyncio_mode = "auto" diff --git a/libs/partners/ibm/scripts/check_imports.py b/libs/partners/ibm/scripts/check_imports.py new file mode 100644 index 0000000000000..fd21a4975b7f0 --- /dev/null +++ b/libs/partners/ibm/scripts/check_imports.py @@ -0,0 +1,17 @@ +import sys +import traceback +from importlib.machinery import SourceFileLoader + +if __name__ == "__main__": + files = sys.argv[1:] + has_failure = False + for file in files: + try: + SourceFileLoader("x", file).load_module() + except Exception: + has_faillure = True + print(file) + traceback.print_exc() + print() + + sys.exit(1 if has_failure else 0) diff --git a/libs/partners/ibm/scripts/check_pydantic.sh b/libs/partners/ibm/scripts/check_pydantic.sh new file mode 100755 index 0000000000000..06b5bb81ae236 --- /dev/null +++ b/libs/partners/ibm/scripts/check_pydantic.sh @@ -0,0 +1,27 @@ +#!/bin/bash +# +# This script searches for lines starting with "import pydantic" or "from pydantic" +# in tracked files within a Git repository. 
+# +# Usage: ./scripts/check_pydantic.sh /path/to/repository + +# Check if a path argument is provided +if [ $# -ne 1 ]; then + echo "Usage: $0 /path/to/repository" + exit 1 +fi + +repository_path="$1" + +# Search for lines matching the pattern within the specified repository +result=$(git -C "$repository_path" grep -E '^import pydantic|^from pydantic') + +# Check if any matching lines were found +if [ -n "$result" ]; then + echo "ERROR: The following lines need to be updated:" + echo "$result" + echo "Please replace the code with an import from langchain_core.pydantic_v1." + echo "For example, replace 'from pydantic import BaseModel'" + echo "with 'from langchain_core.pydantic_v1 import BaseModel'" + exit 1 +fi diff --git a/libs/partners/ibm/scripts/lint_imports.sh b/libs/partners/ibm/scripts/lint_imports.sh new file mode 100755 index 0000000000000..695613c7ba8fd --- /dev/null +++ b/libs/partners/ibm/scripts/lint_imports.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +set -eu + +# Initialize a variable to keep track of errors +errors=0 + +# make sure not importing from langchain or langchain_experimental +git --no-pager grep '^from langchain\.' . && errors=$((errors+1)) +git --no-pager grep '^from langchain_experimental\.' . && errors=$((errors+1)) + +# Decide on an exit status based on the errors +if [ "$errors" -gt 0 ]; then + exit 1 +else + exit 0 +fi diff --git a/libs/partners/ibm/tests/__init__.py b/libs/partners/ibm/tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/ibm/tests/integration_tests/__init__.py b/libs/partners/ibm/tests/integration_tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/ibm/tests/integration_tests/test_compile.py b/libs/partners/ibm/tests/integration_tests/test_compile.py new file mode 100644 index 0000000000000..33ecccdfa0fbd --- /dev/null +++ b/libs/partners/ibm/tests/integration_tests/test_compile.py @@ -0,0 +1,7 @@ +import pytest + + +@pytest.mark.compile +def test_placeholder() -> None: + """Used for compiling integration tests without running any real tests.""" + pass diff --git a/libs/partners/ibm/tests/integration_tests/test_llms.py b/libs/partners/ibm/tests/integration_tests/test_llms.py new file mode 100644 index 0000000000000..cc93ee5277e40 --- /dev/null +++ b/libs/partners/ibm/tests/integration_tests/test_llms.py @@ -0,0 +1,83 @@ +"""Test WatsonxLLM API wrapper. + +You'll need to set WATSONX_APIKEY and WATSONX_PROJECT_ID environment variables. 
+""" + +import os + +from langchain_core.outputs import LLMResult + +from langchain_ibm import WatsonxLLM + +PROJECT_ID = os.environ.get("WATSONX_PROJECT_ID", "") + + +def test_watsonxllm_invoke() -> None: + watsonxllm = WatsonxLLM( + model_id="google/flan-ul2", + url="https://us-south.ml.cloud.ibm.com", + project_id=PROJECT_ID, + ) + response = watsonxllm.invoke("What color sunflower is?") + assert isinstance(response, str) + assert len(response) > 0 + + +def test_watsonxllm_generate() -> None: + watsonxllm = WatsonxLLM( + model_id="google/flan-ul2", + url="https://us-south.ml.cloud.ibm.com", + project_id=PROJECT_ID, + ) + response = watsonxllm.generate(["What color sunflower is?"]) + response_text = response.generations[0][0].text + assert isinstance(response, LLMResult) + assert len(response_text) > 0 + + +def test_watsonxllm_generate_with_multiple_prompts() -> None: + watsonxllm = WatsonxLLM( + model_id="google/flan-ul2", + url="https://us-south.ml.cloud.ibm.com", + project_id=PROJECT_ID, + ) + response = watsonxllm.generate( + ["What color sunflower is?", "What color turtle is?"] + ) + response_text = response.generations[0][0].text + assert isinstance(response, LLMResult) + assert len(response_text) > 0 + + +def test_watsonxllm_generate_stream() -> None: + watsonxllm = WatsonxLLM( + model_id="google/flan-ul2", + url="https://us-south.ml.cloud.ibm.com", + project_id=PROJECT_ID, + ) + response = watsonxllm.generate(["What color sunflower is?"], stream=True) + response_text = response.generations[0][0].text + assert isinstance(response, LLMResult) + assert len(response_text) > 0 + + +def test_watsonxllm_stream() -> None: + watsonxllm = WatsonxLLM( + model_id="google/flan-ul2", + url="https://us-south.ml.cloud.ibm.com", + project_id=PROJECT_ID, + ) + response = watsonxllm.invoke("What color sunflower is?") + + stream_response = watsonxllm.stream("What color sunflower is?") + + linked_text_stream = "" + for chunk in stream_response: + assert isinstance( + chunk, str + ), f"chunk expect type '{str}', actual '{type(chunk)}'" + linked_text_stream += chunk + + assert ( + response == linked_text_stream + ), "Linked text stream are not the same as generated text" diff --git a/libs/partners/ibm/tests/unit_tests/__init__.py b/libs/partners/ibm/tests/unit_tests/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/libs/partners/ibm/tests/unit_tests/test_imports.py b/libs/partners/ibm/tests/unit_tests/test_imports.py new file mode 100644 index 0000000000000..e623ff7558364 --- /dev/null +++ b/libs/partners/ibm/tests/unit_tests/test_imports.py @@ -0,0 +1,7 @@ +from langchain_ibm import __all__ + +EXPECTED_ALL = ["WatsonxLLM"] + + +def test_all_imports() -> None: + assert sorted(EXPECTED_ALL) == sorted(__all__) diff --git a/libs/partners/ibm/tests/unit_tests/test_llms.py b/libs/partners/ibm/tests/unit_tests/test_llms.py new file mode 100644 index 0000000000000..01d86590af10b --- /dev/null +++ b/libs/partners/ibm/tests/unit_tests/test_llms.py @@ -0,0 +1,60 @@ +"""Test WatsonxLLM API wrapper.""" + +import os + +from langchain_ibm import WatsonxLLM + +os.environ.pop("WATSONX_APIKEY", None) +os.environ.pop("WATSONX_PROJECT_ID", None) + + +def test_initialize_watsonxllm_bad_path_without_url() -> None: + try: + WatsonxLLM( + model_id="google/flan-ul2", + ) + except ValueError as e: + assert "WATSONX_URL" in e.__str__() + + +def test_initialize_watsonxllm_cloud_bad_path() -> None: + try: + WatsonxLLM(model_id="google/flan-ul2", url="https://us-south.ml.cloud.ibm.com") + except 
+        assert "WATSONX_APIKEY" in e.__str__()
+
+
+def test_initialize_watsonxllm_cpd_bad_path_without_all() -> None:
+    try:
+        WatsonxLLM(
+            model_id="google/flan-ul2",
+            url="https://cpd-zen.apps.cpd48.cp.fyre.ibm.com",
+        )
+    except ValueError as e:
+        assert (
+            "WATSONX_APIKEY" in e.__str__()
+            and "WATSONX_PASSWORD" in e.__str__()
+            and "WATSONX_TOKEN" in e.__str__()
+        )
+
+
+def test_initialize_watsonxllm_cpd_bad_path_password_without_username() -> None:
+    try:
+        WatsonxLLM(
+            model_id="google/flan-ul2",
+            url="https://cpd-zen.apps.cpd48.cp.fyre.ibm.com",
+            password="test_password",
+        )
+    except ValueError as e:
+        assert "WATSONX_USERNAME" in e.__str__()
+
+
+def test_initialize_watsonxllm_cpd_bad_path_apikey_without_username() -> None:
+    try:
+        WatsonxLLM(
+            model_id="google/flan-ul2",
+            url="https://cpd-zen.apps.cpd48.cp.fyre.ibm.com",
+            apikey="test_apikey",
+        )
+    except ValueError as e:
+        assert "WATSONX_USERNAME" in e.__str__()
diff --git a/libs/partners/mistralai/poetry.lock b/libs/partners/mistralai/poetry.lock
index ea414d7b199a1..92478c94e0e20 100644
--- a/libs/partners/mistralai/poetry.lock
+++ b/libs/partners/mistralai/poetry.lock
@@ -376,7 +376,7 @@ files = [

 [[package]]
 name = "langchain-core"
-version = "0.1.19"
+version = "0.1.23"
 description = "Building applications with LLMs through composability"
 optional = false
 python-versions = ">=3.8.1,<4.0"
@@ -386,7 +386,7 @@ develop = true
 [package.dependencies]
 anyio = ">=3,<5"
 jsonpatch = "^1.33"
-langsmith = ">=0.0.83,<0.1"
+langsmith = "^0.0.87"
 packaging = "^23.2"
 pydantic = ">=1,<3"
 PyYAML = ">=5.3"
@@ -402,13 +402,13 @@ url = "../../core"

 [[package]]
 name = "langsmith"
-version = "0.0.86"
+version = "0.0.87"
 description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform."
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.86-py3-none-any.whl", hash = "sha256:7af15c36edb8c9fd9ae5c6d4fb940eb1da668b630a703d63c90c91e9be53aefb"}, - {file = "langsmith-0.0.86.tar.gz", hash = "sha256:c1572824664810c4425b17f2d1e9a59d53992e6898df22a37236c62d3c80f59e"}, + {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, + {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, ] [package.dependencies] @@ -417,13 +417,13 @@ requests = ">=2,<3" [[package]] name = "mistralai" -version = "0.0.11" +version = "0.0.12" description = "" optional = false python-versions = ">=3.8,<4.0" files = [ - {file = "mistralai-0.0.11-py3-none-any.whl", hash = "sha256:fb2a240a3985420c4e7db48eb5077d6d6dbc5e83cac0dd948c20342fb48087ee"}, - {file = "mistralai-0.0.11.tar.gz", hash = "sha256:383072715531198305dab829ab3749b64933bbc2549354f3c9ebc43c17b912cf"}, + {file = "mistralai-0.0.12-py3-none-any.whl", hash = "sha256:d489d1f0a31bf0edbe15c6d12f68b943148d2a725a088be0d8a5d4c888f8436c"}, + {file = "mistralai-0.0.12.tar.gz", hash = "sha256:fe652836146a15bdce7691a95803a32c53c641c5400093447ffa93bf2ed296b2"}, ] [package.dependencies] @@ -861,121 +861,121 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] [[package]] name = "tokenizers" -version = "0.15.1" +version = "0.15.2" description = "" optional = false python-versions = ">=3.7" files = [ - {file = "tokenizers-0.15.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:32c9491dd1bcb33172c26b454dbd607276af959b9e78fa766e2694cafab3103c"}, - {file = "tokenizers-0.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29a1b784b870a097e7768f8c20c2dd851e2c75dad3efdae69a79d3e7f1d614d5"}, - {file = "tokenizers-0.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0049fbe648af04148b08cb211994ce8365ee628ce49724b56aaefd09a3007a78"}, - {file = "tokenizers-0.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e84b3c235219e75e24de6b71e6073cd2c8d740b14d88e4c6d131b90134e3a338"}, - {file = "tokenizers-0.15.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8cc575769ea11d074308c6d71cb10b036cdaec941562c07fc7431d956c502f0e"}, - {file = "tokenizers-0.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bf28f299c4158e6d0b5eaebddfd500c4973d947ffeaca8bcbe2e8c137dff0b"}, - {file = "tokenizers-0.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:506555f98361db9c74e1323a862d77dcd7d64c2058829a368bf4159d986e339f"}, - {file = "tokenizers-0.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7061b0a28ade15906f5b2ec8c48d3bdd6e24eca6b427979af34954fbe31d5cef"}, - {file = "tokenizers-0.15.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ed5e35507b7a0e2aac3285c4f5e37d4ec5cfc0e5825b862b68a0aaf2757af52"}, - {file = "tokenizers-0.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1c9df9247df0de6509dd751b1c086e5f124b220133b5c883bb691cb6fb3d786f"}, - {file = "tokenizers-0.15.1-cp310-none-win32.whl", hash = "sha256:dd999af1b4848bef1b11d289f04edaf189c269d5e6afa7a95fa1058644c3f021"}, - {file = "tokenizers-0.15.1-cp310-none-win_amd64.whl", hash = "sha256:39d06a57f7c06940d602fad98702cf7024c4eee7f6b9fe76b9f2197d5a4cc7e2"}, - {file = "tokenizers-0.15.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:8ad034eb48bf728af06915e9294871f72fcc5254911eddec81d6df8dba1ce055"}, - {file = "tokenizers-0.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea9ede7c42f8fa90f31bfc40376fd91a7d83a4aa6ad38e6076de961d48585b26"}, - {file = "tokenizers-0.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b85d6fe1a20d903877aa0ef32ef6b96e81e0e48b71c206d6046ce16094de6970"}, - {file = "tokenizers-0.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a7d44f656320137c7d643b9c7dcc1814763385de737fb98fd2643880910f597"}, - {file = "tokenizers-0.15.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd244bd0793cdacf27ee65ec3db88c21f5815460e8872bbeb32b040469d6774e"}, - {file = "tokenizers-0.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3f4a36e371b3cb1123adac8aeeeeab207ad32f15ed686d9d71686a093bb140"}, - {file = "tokenizers-0.15.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2921a53966afb29444da98d56a6ccbef23feb3b0c0f294b4e502370a0a64f25"}, - {file = "tokenizers-0.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f49068cf51f49c231067f1a8c9fc075ff960573f6b2a956e8e1b0154fb638ea5"}, - {file = "tokenizers-0.15.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0ab1a22f20eaaab832ab3b00a0709ca44a0eb04721e580277579411b622c741c"}, - {file = "tokenizers-0.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:671268f24b607c4adc6fa2b5b580fd4211b9f84b16bd7f46d62f8e5be0aa7ba4"}, - {file = "tokenizers-0.15.1-cp311-none-win32.whl", hash = "sha256:a4f03e33d2bf7df39c8894032aba599bf90f6f6378e683a19d28871f09bb07fc"}, - {file = "tokenizers-0.15.1-cp311-none-win_amd64.whl", hash = "sha256:30f689537bcc7576d8bd4daeeaa2cb8f36446ba2f13f421b173e88f2d8289c4e"}, - {file = "tokenizers-0.15.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f3a379dd0898a82ea3125e8f9c481373f73bffce6430d4315f0b6cd5547e409"}, - {file = "tokenizers-0.15.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d870ae58bba347d38ac3fc8b1f662f51e9c95272d776dd89f30035c83ee0a4f"}, - {file = "tokenizers-0.15.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d6d28e0143ec2e253a8a39e94bf1d24776dbe73804fa748675dbffff4a5cd6d8"}, - {file = "tokenizers-0.15.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61ae9ac9f44e2da128ee35db69489883b522f7abe033733fa54eb2de30dac23d"}, - {file = "tokenizers-0.15.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d8e322a47e29128300b3f7749a03c0ec2bce0a3dc8539ebff738d3f59e233542"}, - {file = "tokenizers-0.15.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:760334f475443bc13907b1a8e1cb0aeaf88aae489062546f9704dce6c498bfe2"}, - {file = "tokenizers-0.15.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1b173753d4aca1e7d0d4cb52b5e3ffecfb0ca014e070e40391b6bb4c1d6af3f2"}, - {file = "tokenizers-0.15.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82c1f13d457c8f0ab17e32e787d03470067fe8a3b4d012e7cc57cb3264529f4a"}, - {file = "tokenizers-0.15.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:425b46ceff4505f20191df54b50ac818055d9d55023d58ae32a5d895b6f15bb0"}, - {file = "tokenizers-0.15.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:681ac6ba3b4fdaf868ead8971221a061f580961c386e9732ea54d46c7b72f286"}, - {file = "tokenizers-0.15.1-cp312-none-win32.whl", hash = 
"sha256:f2272656063ccfba2044df2115095223960d80525d208e7a32f6c01c351a6f4a"}, - {file = "tokenizers-0.15.1-cp312-none-win_amd64.whl", hash = "sha256:9abe103203b1c6a2435d248d5ff4cceebcf46771bfbc4957a98a74da6ed37674"}, - {file = "tokenizers-0.15.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2ce9ed5c8ef26b026a66110e3c7b73d93ec2d26a0b1d0ea55ddce61c0e5f446f"}, - {file = "tokenizers-0.15.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:89b24d366137986c3647baac29ef902d2d5445003d11c30df52f1bd304689aeb"}, - {file = "tokenizers-0.15.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0faebedd01b413ab777ca0ee85914ed8b031ea5762ab0ea60b707ce8b9be6842"}, - {file = "tokenizers-0.15.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdbd9dfcdad4f3b95d801f768e143165165055c18e44ca79a8a26de889cd8e85"}, - {file = "tokenizers-0.15.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:97194324c12565b07e9993ca9aa813b939541185682e859fb45bb8d7d99b3193"}, - {file = "tokenizers-0.15.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:485e43e2cc159580e0d83fc919ec3a45ae279097f634b1ffe371869ffda5802c"}, - {file = "tokenizers-0.15.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:191d084d60e3589d6420caeb3f9966168269315f8ec7fbc3883122dc9d99759d"}, - {file = "tokenizers-0.15.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01c28cc8d7220634a75b14c53f4fc9d1b485f99a5a29306a999c115921de2897"}, - {file = "tokenizers-0.15.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:325212027745d3f8d5d5006bb9e5409d674eb80a184f19873f4f83494e1fdd26"}, - {file = "tokenizers-0.15.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3c5573603c36ce12dbe318bcfb490a94cad2d250f34deb2f06cb6937957bbb71"}, - {file = "tokenizers-0.15.1-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:1441161adb6d71a15a630d5c1d8659d5ebe41b6b209586fbeea64738e58fcbb2"}, - {file = "tokenizers-0.15.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:382a8d0c31afcfb86571afbfefa37186df90865ce3f5b731842dab4460e53a38"}, - {file = "tokenizers-0.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e76959783e3f4ec73b3f3d24d4eec5aa9225f0bee565c48e77f806ed1e048f12"}, - {file = "tokenizers-0.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:401df223e5eb927c5961a0fc6b171818a2bba01fb36ef18c3e1b69b8cd80e591"}, - {file = "tokenizers-0.15.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52606c233c759561a16e81b2290a7738c3affac7a0b1f0a16fe58dc22e04c7d"}, - {file = "tokenizers-0.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b72c658bbe5a05ed8bc2ac5ad782385bfd743ffa4bc87d9b5026341e709c6f44"}, - {file = "tokenizers-0.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:25f5643a2f005c42f0737a326c6c6bdfedfdc9a994b10a1923d9c3e792e4d6a6"}, - {file = "tokenizers-0.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c5b6f633999d6b42466bbfe21be2e26ad1760b6f106967a591a41d8cbca980e"}, - {file = "tokenizers-0.15.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ceb5c9ad11a015150b545c1a11210966a45b8c3d68a942e57cf8938c578a77ca"}, - {file = "tokenizers-0.15.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bedd4ce0c4872db193444c395b11c7697260ce86a635ab6d48102d76be07d324"}, - {file = "tokenizers-0.15.1-cp37-none-win32.whl", hash = 
"sha256:cd6caef6c14f5ed6d35f0ddb78eab8ca6306d0cd9870330bccff72ad014a6f42"}, - {file = "tokenizers-0.15.1-cp37-none-win_amd64.whl", hash = "sha256:d2bd7af78f58d75a55e5df61efae164ab9200c04b76025f9cc6eeb7aff3219c2"}, - {file = "tokenizers-0.15.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:59b3ca6c02e0bd5704caee274978bd055de2dff2e2f39dadf536c21032dfd432"}, - {file = "tokenizers-0.15.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:48fe21b67c22583bed71933a025fd66b1f5cfae1baefa423c3d40379b5a6e74e"}, - {file = "tokenizers-0.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3d190254c66a20fb1efbdf035e6333c5e1f1c73b1f7bfad88f9c31908ac2c2c4"}, - {file = "tokenizers-0.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fef90c8f5abf17d48d6635f5fd92ad258acd1d0c2d920935c8bf261782cfe7c8"}, - {file = "tokenizers-0.15.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fac011ef7da3357aa7eb19efeecf3d201ede9618f37ddedddc5eb809ea0963ca"}, - {file = "tokenizers-0.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:574ec5b3e71d1feda6b0ecac0e0445875729b4899806efbe2b329909ec75cb50"}, - {file = "tokenizers-0.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aca16c3c0637c051a59ea99c4253f16fbb43034fac849076a7e7913b2b9afd2d"}, - {file = "tokenizers-0.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a6f238fc2bbfd3e12e8529980ec1624c7e5b69d4e959edb3d902f36974f725a"}, - {file = "tokenizers-0.15.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:587e11a26835b73c31867a728f32ca8a93c9ded4a6cd746516e68b9d51418431"}, - {file = "tokenizers-0.15.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6456e7ad397352775e2efdf68a9ec5d6524bbc4543e926eef428d36de627aed4"}, - {file = "tokenizers-0.15.1-cp38-none-win32.whl", hash = "sha256:614f0da7dd73293214bd143e6221cafd3f7790d06b799f33a987e29d057ca658"}, - {file = "tokenizers-0.15.1-cp38-none-win_amd64.whl", hash = "sha256:a4fa0a20d9f69cc2bf1cfce41aa40588598e77ec1d6f56bf0eb99769969d1ede"}, - {file = "tokenizers-0.15.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8d3f18a45e0cf03ce193d5900460dc2430eec4e14c786e5d79bddba7ea19034f"}, - {file = "tokenizers-0.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:38dbd6c38f88ad7d5dc5d70c764415d38fe3bcd99dc81638b572d093abc54170"}, - {file = "tokenizers-0.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:777286b1f7e52de92aa4af49fe31046cfd32885d1bbaae918fab3bba52794c33"}, - {file = "tokenizers-0.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58d4d550a3862a47dd249892d03a025e32286eb73cbd6bc887fb8fb64bc97165"}, - {file = "tokenizers-0.15.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eda68ce0344f35042ae89220b40a0007f721776b727806b5c95497b35714bb7"}, - {file = "tokenizers-0.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cd33d15f7a3a784c3b665cfe807b8de3c6779e060349bd5005bb4ae5bdcb437"}, - {file = "tokenizers-0.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0a1aa370f978ac0bfb50374c3a40daa93fd56d47c0c70f0c79607fdac2ccbb42"}, - {file = "tokenizers-0.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:241482b940340fff26a2708cb9ba383a5bb8a2996d67a0ff2c4367bf4b86cc3a"}, - {file = "tokenizers-0.15.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:68f30b05f46a4d9aba88489eadd021904afe90e10a7950e28370d6e71b9db021"}, - {file = "tokenizers-0.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5a3c5d8025529670462b881b7b2527aacb6257398c9ec8e170070432c3ae3a82"}, - {file = "tokenizers-0.15.1-cp39-none-win32.whl", hash = "sha256:74d1827830f60a9d78da8f6d49a1fbea5422ce0eea42e2617877d23380a7efbc"}, - {file = "tokenizers-0.15.1-cp39-none-win_amd64.whl", hash = "sha256:9ff499923e4d6876d6b6a63ea84a56805eb35e91dd89b933a7aee0c56a3838c6"}, - {file = "tokenizers-0.15.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b3aa007a0f4408f62a8471bdaa3faccad644cbf2622639f2906b4f9b5339e8b8"}, - {file = "tokenizers-0.15.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f3d4176fa93d8b2070db8f3c70dc21106ae6624fcaaa334be6bdd3a0251e729e"}, - {file = "tokenizers-0.15.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d0e463655ef8b2064df07bd4a445ed7f76f6da3b286b4590812587d42f80e89"}, - {file = "tokenizers-0.15.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:089138fd0351b62215c462a501bd68b8df0e213edcf99ab9efd5dba7b4cb733e"}, - {file = "tokenizers-0.15.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e563ac628f5175ed08e950430e2580e544b3e4b606a0995bb6b52b3a3165728"}, - {file = "tokenizers-0.15.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:244dcc28c5fde221cb4373961b20da30097669005b122384d7f9f22752487a46"}, - {file = "tokenizers-0.15.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d82951d46052dddae1369e68ff799a0e6e29befa9a0b46e387ae710fd4daefb0"}, - {file = "tokenizers-0.15.1-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7b14296bc9059849246ceb256ffbe97f8806a9b5d707e0095c22db312f4fc014"}, - {file = "tokenizers-0.15.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0309357bb9b6c8d86cdf456053479d7112074b470651a997a058cd7ad1c4ea57"}, - {file = "tokenizers-0.15.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:083f06e9d8d01b70b67bcbcb7751b38b6005512cce95808be6bf34803534a7e7"}, - {file = "tokenizers-0.15.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85288aea86ada579789447f0dcec108ebef8da4b450037eb4813d83e4da9371e"}, - {file = "tokenizers-0.15.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:385e6fcb01e8de90c1d157ae2a5338b23368d0b1c4cc25088cdca90147e35d17"}, - {file = "tokenizers-0.15.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:60067edfcbf7d6cd448ac47af41ec6e84377efbef7be0c06f15a7c1dd069e044"}, - {file = "tokenizers-0.15.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f7e37f89acfe237d4eaf93c3b69b0f01f407a7a5d0b5a8f06ba91943ea3cf10"}, - {file = "tokenizers-0.15.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:6a63a15b523d42ebc1f4028e5a568013388c2aefa4053a263e511cb10aaa02f1"}, - {file = "tokenizers-0.15.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2417d9e4958a6c2fbecc34c27269e74561c55d8823bf914b422e261a11fdd5fd"}, - {file = "tokenizers-0.15.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8550974bace6210e41ab04231e06408cf99ea4279e0862c02b8d47e7c2b2828"}, - {file = "tokenizers-0.15.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:194ba82129b171bcd29235a969e5859a93e491e9b0f8b2581f500f200c85cfdd"}, - {file = 
"tokenizers-0.15.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1bfd95eef8b01e6c0805dbccc8eaf41d8c5a84f0cce72c0ab149fe76aae0bce6"}, - {file = "tokenizers-0.15.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b87a15dd72f8216b03c151e3dace00c75c3fe7b0ee9643c25943f31e582f1a34"}, - {file = "tokenizers-0.15.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6ac22f358a0c2a6c685be49136ce7ea7054108986ad444f567712cf274b34cd8"}, - {file = "tokenizers-0.15.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e9d1f046a9b9d9a95faa103f07db5921d2c1c50f0329ebba4359350ee02b18b"}, - {file = "tokenizers-0.15.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a0fd30a4b74485f6a7af89fffb5fb84d6d5f649b3e74f8d37f624cc9e9e97cf"}, - {file = "tokenizers-0.15.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:80e45dc206b9447fa48795a1247c69a1732d890b53e2cc51ba42bc2fefa22407"}, - {file = "tokenizers-0.15.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4eaff56ef3e218017fa1d72007184401f04cb3a289990d2b6a0a76ce71c95f96"}, - {file = "tokenizers-0.15.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b41dc107e4a4e9c95934e79b025228bbdda37d9b153d8b084160e88d5e48ad6f"}, - {file = "tokenizers-0.15.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1922b8582d0c33488764bcf32e80ef6054f515369e70092729c928aae2284bc2"}, - {file = "tokenizers-0.15.1.tar.gz", hash = "sha256:c0a331d6d5a3d6e97b7f99f562cee8d56797180797bc55f12070e495e717c980"}, + {file = "tokenizers-0.15.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:52f6130c9cbf70544287575a985bf44ae1bda2da7e8c24e97716080593638012"}, + {file = "tokenizers-0.15.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:054c1cc9c6d68f7ffa4e810b3d5131e0ba511b6e4be34157aa08ee54c2f8d9ee"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a9b9b070fdad06e347563b88c278995735292ded1132f8657084989a4c84a6d5"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea621a7eef4b70e1f7a4e84dd989ae3f0eeb50fc8690254eacc08acb623e82f1"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cf7fd9a5141634fa3aa8d6b7be362e6ae1b4cda60da81388fa533e0b552c98fd"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44f2a832cd0825295f7179eaf173381dc45230f9227ec4b44378322d900447c9"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b9ec69247a23747669ec4b0ca10f8e3dfb3545d550258129bd62291aabe8605"}, + {file = "tokenizers-0.15.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b6a4c78da863ff26dbd5ad9a8ecc33d8a8d97b535172601cf00aee9d7ce9ce"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5ab2a4d21dcf76af60e05af8063138849eb1d6553a0d059f6534357bce8ba364"}, + {file = "tokenizers-0.15.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a47acfac7e511f6bbfcf2d3fb8c26979c780a91e06fb5b9a43831b2c0153d024"}, + {file = "tokenizers-0.15.2-cp310-none-win32.whl", hash = "sha256:064ff87bb6acdbd693666de9a4b692add41308a2c0ec0770d6385737117215f2"}, + {file = "tokenizers-0.15.2-cp310-none-win_amd64.whl", hash = "sha256:3b919afe4df7eb6ac7cafd2bd14fb507d3f408db7a68c43117f579c984a73843"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_10_12_x86_64.whl", 
hash = "sha256:89cd1cb93e4b12ff39bb2d626ad77e35209de9309a71e4d3d4672667b4b256e7"}, + {file = "tokenizers-0.15.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cfed5c64e5be23d7ee0f0e98081a25c2a46b0b77ce99a4f0605b1ec43dd481fa"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a907d76dcfda37023ba203ab4ceeb21bc5683436ebefbd895a0841fd52f6f6f2"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20ea60479de6fc7b8ae756b4b097572372d7e4032e2521c1bbf3d90c90a99ff0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:48e2b9335be2bc0171df9281385c2ed06a15f5cf121c44094338306ab7b33f2c"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:112a1dd436d2cc06e6ffdc0b06d55ac019a35a63afd26475205cb4b1bf0bfbff"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4620cca5c2817177ee8706f860364cc3a8845bc1e291aaf661fb899e5d1c45b0"}, + {file = "tokenizers-0.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccd73a82751c523b3fc31ff8194702e4af4db21dc20e55b30ecc2079c5d43cb7"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:107089f135b4ae7817affe6264f8c7a5c5b4fd9a90f9439ed495f54fcea56fb4"}, + {file = "tokenizers-0.15.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0ff110ecc57b7aa4a594396525a3451ad70988e517237fe91c540997c4e50e29"}, + {file = "tokenizers-0.15.2-cp311-none-win32.whl", hash = "sha256:6d76f00f5c32da36c61f41c58346a4fa7f0a61be02f4301fd30ad59834977cc3"}, + {file = "tokenizers-0.15.2-cp311-none-win_amd64.whl", hash = "sha256:cc90102ed17271cf0a1262babe5939e0134b3890345d11a19c3145184b706055"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f86593c18d2e6248e72fb91c77d413a815153b8ea4e31f7cd443bdf28e467670"}, + {file = "tokenizers-0.15.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0774bccc6608eca23eb9d620196687c8b2360624619623cf4ba9dc9bd53e8b51"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d0222c5b7c9b26c0b4822a82f6a7011de0a9d3060e1da176f66274b70f846b98"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3835738be1de66624fff2f4f6f6684775da4e9c00bde053be7564cbf3545cc66"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0143e7d9dcd811855c1ce1ab9bf5d96d29bf5e528fd6c7824d0465741e8c10fd"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db35825f6d54215f6b6009a7ff3eedee0848c99a6271c870d2826fbbedf31a38"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f5e64b0389a2be47091d8cc53c87859783b837ea1a06edd9d8e04004df55a5c"}, + {file = "tokenizers-0.15.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e0480c452217edd35eca56fafe2029fb4d368b7c0475f8dfa3c5c9c400a7456"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a33ab881c8fe70474980577e033d0bc9a27b7ab8272896e500708b212995d834"}, + {file = "tokenizers-0.15.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a308a607ca9de2c64c1b9ba79ec9a403969715a1b8ba5f998a676826f1a7039d"}, + {file = "tokenizers-0.15.2-cp312-none-win32.whl", hash = 
"sha256:b8fcfa81bcb9447df582c5bc96a031e6df4da2a774b8080d4f02c0c16b42be0b"}, + {file = "tokenizers-0.15.2-cp312-none-win_amd64.whl", hash = "sha256:38d7ab43c6825abfc0b661d95f39c7f8af2449364f01d331f3b51c94dcff7221"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:38bfb0204ff3246ca4d5e726e8cc8403bfc931090151e6eede54d0e0cf162ef0"}, + {file = "tokenizers-0.15.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c861d35e8286a53e06e9e28d030b5a05bcbf5ac9d7229e561e53c352a85b1fc"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:936bf3842db5b2048eaa53dade907b1160f318e7c90c74bfab86f1e47720bdd6"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:620beacc3373277700d0e27718aa8b25f7b383eb8001fba94ee00aeea1459d89"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2735ecbbf37e52db4ea970e539fd2d450d213517b77745114f92867f3fc246eb"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:473c83c5e2359bb81b0b6fde870b41b2764fcdd36d997485e07e72cc3a62264a"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968fa1fb3c27398b28a4eca1cbd1e19355c4d3a6007f7398d48826bbe3a0f728"}, + {file = "tokenizers-0.15.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:865c60ae6eaebdde7da66191ee9b7db52e542ed8ee9d2c653b6d190a9351b980"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7c0d8b52664ab2d4a8d6686eb5effc68b78608a9008f086a122a7b2996befbab"}, + {file = "tokenizers-0.15.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f33dfbdec3784093a9aebb3680d1f91336c56d86cc70ddf88708251da1fe9064"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d44ba80988ff9424e33e0a49445072ac7029d8c0e1601ad25a0ca5f41ed0c1d6"}, + {file = "tokenizers-0.15.2-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:dce74266919b892f82b1b86025a613956ea0ea62a4843d4c4237be2c5498ed3a"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0ef06b9707baeb98b316577acb04f4852239d856b93e9ec3a299622f6084e4be"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c73e2e74bbb07910da0d37c326869f34113137b23eadad3fc00856e6b3d9930c"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4eeb12daf02a59e29f578a865f55d87cd103ce62bd8a3a5874f8fdeaa82e336b"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ba9f6895af58487ca4f54e8a664a322f16c26bbb442effd01087eba391a719e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccec77aa7150e38eec6878a493bf8c263ff1fa8a62404e16c6203c64c1f16a26"}, + {file = "tokenizers-0.15.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3f40604f5042ff210ba82743dda2b6aa3e55aa12df4e9f2378ee01a17e2855e"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5645938a42d78c4885086767c70923abad047163d809c16da75d6b290cb30bbe"}, + {file = "tokenizers-0.15.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:05a77cbfebe28a61ab5c3891f9939cc24798b63fa236d84e5f29f3a85a200c00"}, + {file = "tokenizers-0.15.2-cp37-none-win32.whl", hash = 
"sha256:361abdc068e8afe9c5b818769a48624687fb6aaed49636ee39bec4e95e1a215b"}, + {file = "tokenizers-0.15.2-cp37-none-win_amd64.whl", hash = "sha256:7ef789f83eb0f9baeb4d09a86cd639c0a5518528f9992f38b28e819df397eb06"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:4fe1f74a902bee74a3b25aff180fbfbf4f8b444ab37c4d496af7afd13a784ed2"}, + {file = "tokenizers-0.15.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c4b89038a684f40a6b15d6b09f49650ac64d951ad0f2a3ea9169687bbf2a8ba"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d05a1b06f986d41aed5f2de464c003004b2df8aaf66f2b7628254bcbfb72a438"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:508711a108684111ec8af89d3a9e9e08755247eda27d0ba5e3c50e9da1600f6d"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:daa348f02d15160cb35439098ac96e3a53bacf35885072611cd9e5be7d333daa"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:494fdbe5932d3416de2a85fc2470b797e6f3226c12845cadf054dd906afd0442"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2d60f5246f4da9373f75ff18d64c69cbf60c3bca597290cea01059c336d2470"}, + {file = "tokenizers-0.15.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93268e788825f52de4c7bdcb6ebc1fcd4a5442c02e730faa9b6b08f23ead0e24"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6fc7083ab404019fc9acafe78662c192673c1e696bd598d16dc005bd663a5cf9"}, + {file = "tokenizers-0.15.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:41e39b41e5531d6b2122a77532dbea60e171ef87a3820b5a3888daa847df4153"}, + {file = "tokenizers-0.15.2-cp38-none-win32.whl", hash = "sha256:06cd0487b1cbfabefb2cc52fbd6b1f8d4c37799bd6c6e1641281adaa6b2504a7"}, + {file = "tokenizers-0.15.2-cp38-none-win_amd64.whl", hash = "sha256:5179c271aa5de9c71712e31cb5a79e436ecd0d7532a408fa42a8dbfa4bc23fd9"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82f8652a74cc107052328b87ea8b34291c0f55b96d8fb261b3880216a9f9e48e"}, + {file = "tokenizers-0.15.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:02458bee6f5f3139f1ebbb6d042b283af712c0981f5bc50edf771d6b762d5e4f"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c9a09cd26cca2e1c349f91aa665309ddb48d71636370749414fbf67bc83c5343"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:158be8ea8554e5ed69acc1ce3fbb23a06060bd4bbb09029431ad6b9a466a7121"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ddba9a2b0c8c81633eca0bb2e1aa5b3a15362b1277f1ae64176d0f6eba78ab1"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ef5dd1d39797044642dbe53eb2bc56435308432e9c7907728da74c69ee2adca"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:454c203164e07a860dbeb3b1f4a733be52b0edbb4dd2e5bd75023ffa8b49403a"}, + {file = "tokenizers-0.15.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cf6b7f1d4dc59af960e6ffdc4faffe6460bbfa8dce27a58bf75755ffdb2526d"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:2ef09bbc16519f6c25d0c7fc0c6a33a6f62923e263c9d7cca4e58b8c61572afb"}, + {file = "tokenizers-0.15.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c9a2ebdd2ad4ec7a68e7615086e633857c85e2f18025bd05d2a4399e6c5f7169"}, + {file = "tokenizers-0.15.2-cp39-none-win32.whl", hash = "sha256:918fbb0eab96fe08e72a8c2b5461e9cce95585d82a58688e7f01c2bd546c79d0"}, + {file = "tokenizers-0.15.2-cp39-none-win_amd64.whl", hash = "sha256:524e60da0135e106b254bd71f0659be9f89d83f006ea9093ce4d1fab498c6d0d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6a9b648a58281c4672212fab04e60648fde574877d0139cd4b4f93fe28ca8944"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7c7d18b733be6bbca8a55084027f7be428c947ddf871c500ee603e375013ffba"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:13ca3611de8d9ddfbc4dc39ef54ab1d2d4aaa114ac8727dfdc6a6ec4be017378"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:237d1bf3361cf2e6463e6c140628e6406766e8b27274f5fcc62c747ae3c6f094"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67a0fe1e49e60c664915e9fb6b0cb19bac082ab1f309188230e4b2920230edb3"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4e022fe65e99230b8fd89ebdfea138c24421f91c1a4f4781a8f5016fd5cdfb4d"}, + {file = "tokenizers-0.15.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d857be2df69763362ac699f8b251a8cd3fac9d21893de129bc788f8baaef2693"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:708bb3e4283177236309e698da5fcd0879ce8fd37457d7c266d16b550bcbbd18"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c35e09e9899b72a76e762f9854e8750213f67567787d45f37ce06daf57ca78"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1257f4394be0d3b00de8c9e840ca5601d0a4a8438361ce9c2b05c7d25f6057b"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02272fe48280e0293a04245ca5d919b2c94a48b408b55e858feae9618138aeda"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dc3ad9ebc76eabe8b1d7c04d38be884b8f9d60c0cdc09b0aa4e3bcf746de0388"}, + {file = "tokenizers-0.15.2-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:32e16bdeffa7c4f46bf2152172ca511808b952701d13e7c18833c0b73cb5c23f"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fb16ba563d59003028b678d2361a27f7e4ae0ab29c7a80690efa20d829c81fdb"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:2277c36d2d6cdb7876c274547921a42425b6810d38354327dd65a8009acf870c"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1cf75d32e8d250781940d07f7eece253f2fe9ecdb1dc7ba6e3833fa17b82fcbc"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b3b31884dc8e9b21508bb76da80ebf7308fdb947a17affce815665d5c4d028"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10122d8d8e30afb43bb1fe21a3619f62c3e2574bff2699cf8af8b0b6c5dc4a3"}, + {file = 
"tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d88b96ff0fe8e91f6ef01ba50b0d71db5017fa4e3b1d99681cec89a85faf7bf7"}, + {file = "tokenizers-0.15.2-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:37aaec5a52e959892870a7c47cef80c53797c0db9149d458460f4f31e2fb250e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e2ea752f2b0fe96eb6e2f3adbbf4d72aaa1272079b0dfa1145507bd6a5d537e6"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b19a808d8799fda23504a5cd31d2f58e6f52f140380082b352f877017d6342b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:64c86e5e068ac8b19204419ed8ca90f9d25db20578f5881e337d203b314f4104"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de19c4dc503c612847edf833c82e9f73cd79926a384af9d801dcf93f110cea4e"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea09acd2fe3324174063d61ad620dec3bcf042b495515f27f638270a7d466e8b"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cf27fd43472e07b57cf420eee1e814549203d56de00b5af8659cb99885472f1f"}, + {file = "tokenizers-0.15.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:7ca22bd897537a0080521445d91a58886c8c04084a6a19e6c78c586e0cfa92a5"}, + {file = "tokenizers-0.15.2.tar.gz", hash = "sha256:e6e9c6e019dd5484be5beafc775ae6c925f4c69a3487040ed09b45e13df2cb91"}, ] [package.dependencies] @@ -999,13 +999,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.1" +version = "4.66.2" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, - {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, ] [package.dependencies] @@ -1048,4 +1048,4 @@ zstd = ["zstandard (>=0.18.0)"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "e3e1795eb1e3f7828f431e7882e2c841148a95e0c5ac84f2e6e8623c884eda21" +content-hash = "80ff38d21357b7d1d0dded7726d1d61b342307280838454ded0a7ed1b1220b40" diff --git a/libs/partners/mistralai/pyproject.toml b/libs/partners/mistralai/pyproject.toml index 8f8278e2aaace..393899987e8dd 100644 --- a/libs/partners/mistralai/pyproject.toml +++ b/libs/partners/mistralai/pyproject.toml @@ -13,7 +13,7 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" langchain-core = "^0.1" -mistralai = "^0.0.11" +mistralai = ">=0.0.11,<0.1" tokenizers = "^0.15.1" [tool.poetry.group.test] @@ -22,7 +22,7 @@ optional = true [tool.poetry.group.test.dependencies] pytest = "^7.3.0" pytest-asyncio = "^0.21.1" -langchain-core = {path = "../../core", develop = true} +langchain-core = { path = "../../core", develop = true } [tool.poetry.group.codespell] optional = true @@ -43,19 +43,19 @@ ruff = "^0.1.5" [tool.poetry.group.typing.dependencies] mypy = "^0.991" -langchain-core = {path = "../../core", develop = true} +langchain-core = { path = "../../core", develop = true } [tool.poetry.group.dev] optional = true 
[tool.poetry.group.dev.dependencies] -langchain-core = {path = "../../core", develop = true} +langchain-core = { path = "../../core", develop = true } [tool.ruff.lint] select = [ - "E", # pycodestyle - "F", # pyflakes - "I", # isort + "E", # pycodestyle + "F", # pyflakes + "I", # isort "T201", # print ] @@ -63,9 +63,7 @@ select = [ disallow_untyped_defs = "True" [tool.coverage.run] -omit = [ - "tests/*", -] +omit = ["tests/*"] [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/libs/partners/nomic/langchain_nomic/embeddings.py b/libs/partners/nomic/langchain_nomic/embeddings.py index 36c476df0d717..55152c417f11a 100644 --- a/libs/partners/nomic/langchain_nomic/embeddings.py +++ b/libs/partners/nomic/langchain_nomic/embeddings.py @@ -3,6 +3,7 @@ import nomic # type: ignore from langchain_core.embeddings import Embeddings +from nomic import embed # type: ignore class NomicEmbeddings(Embeddings): @@ -21,6 +22,7 @@ def __init__( *, model: str, nomic_api_key: Optional[str] = None, + dimensionality: Optional[int] = None, ): """Initialize NomicEmbeddings model. @@ -33,6 +35,7 @@ def __init__( if _api_key: nomic.login(_api_key) self.model = model + self.dimensionality = dimensionality def embed(self, texts: List[str], *, task_type: str) -> List[List[float]]: """Embed texts. @@ -42,10 +45,13 @@ def embed(self, texts: List[str], *, task_type: str) -> List[List[float]]: task_type: the task type to use when embedding. One of `search_query`, `search_document`, `classification`, `clustering` """ - # TODO: do this via nomic.embed when fixed in nomic sdk - from nomic import embed - output = embed.text(texts=texts, model=self.model, task_type=task_type) + output = embed.text( + texts=texts, + model=self.model, + task_type=task_type, + dimensionality=self.dimensionality, + ) return output["embeddings"] def embed_documents(self, texts: List[str]) -> List[List[float]]: diff --git a/libs/partners/nomic/poetry.lock b/libs/partners/nomic/poetry.lock index 77c5f064198c6..fa730097704a8 100644 --- a/libs/partners/nomic/poetry.lock +++ b/libs/partners/nomic/poetry.lock @@ -57,13 +57,13 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -298,7 +298,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.1.15" +version = "0.1.23" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -308,7 +308,7 @@ develop = true [package.dependencies] anyio = ">=3,<5" jsonpatch = "^1.33" -langsmith = ">=0.0.83,<0.1" +langsmith = "^0.0.87" packaging = "^23.2" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -324,13 +324,13 @@ url = "../../core" [[package]] name = "langsmith" -version = "0.0.83" +version = "0.0.87" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.83-py3-none-any.whl", hash = "sha256:a5bb7ac58c19a415a9d5f51db56dd32ee2cd7343a00825bbc2018312eb3d122a"}, - {file = "langsmith-0.0.83.tar.gz", hash = "sha256:94427846b334ad9bdbec3266fee12903fe9f5448f628667689d0412012aaf392"}, + {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, + {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, ] [package.dependencies] @@ -453,12 +453,12 @@ files = [ [[package]] name = "nomic" -version = "3.0.7" +version = "3.0.12" description = "The official Nomic python client." optional = false python-versions = "*" files = [ - {file = "nomic-3.0.7.tar.gz", hash = "sha256:7832d224a70ccb31e0ca8921a8f12a92543bb0423ab05f6e4a7ec143c5ceb7d4"}, + {file = "nomic-3.0.12.tar.gz", hash = "sha256:1a90dfa49dfe24c2a0f3b2c4469c83716ff3f11ef778cd612d1744a3f6f9f08d"}, ] [package.dependencies] @@ -563,8 +563,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -680,13 +680,13 @@ xmp = ["defusedxml"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -743,18 +743,18 @@ numpy = ">=1.16.6,<2" [[package]] name = "pydantic" -version = "2.5.3" +version = "2.6.1" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, - {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.6" +pydantic-core = "2.16.2" typing-extensions = ">=4.6.1" [package.extras] @@ -762,116 +762,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.6" +version = "2.16.2" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, - {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, - {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, - {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, - {file = 
"pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, - {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, - {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, - {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, - {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, - {file = 
"pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, - {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, - {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, - {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, - {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, - {file = 
"pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, - {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, - {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = 
"sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, - {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = 
"pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = 
"pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + 
{file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + 
{file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -968,13 +942,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-watcher" -version = "0.3.4" +version = "0.3.5" description = "Automatically rerun your tests on file modifications" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "pytest_watcher-0.3.4-py3-none-any.whl", hash = "sha256:edd2bd9c8a1fb14d48c9f4947234065eb9b4c1acedc0bf213b1f12501dfcffd3"}, - {file = "pytest_watcher-0.3.4.tar.gz", hash = "sha256:d39491ba15b589221bb9a78ef4bed3d5d1503aed08209b1a138aeb95b9117a18"}, + {file = "pytest_watcher-0.3.5-py3-none-any.whl", hash = "sha256:af00ca52c7be22dc34c0fd3d7ffef99057207a73b05dc5161fe3b2fe91f58130"}, + {file = "pytest_watcher-0.3.5.tar.gz", hash = "sha256:8896152460ba2b1a8200c12117c6611008ec96c8b2d811f0a05ab8a82b043ff8"}, ] [package.dependencies] @@ -997,13 +971,13 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2023.3.post1" +version = "2024.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, - {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -1108,28 +1082,28 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.1.14" +version = "0.1.15" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:96f76536df9b26622755c12ed8680f159817be2f725c17ed9305b472a757cdbb"}, - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab3f71f64498c7241123bb5a768544cf42821d2a537f894b22457a543d3ca7a9"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060156ecc572b8f984fd20fd8b0fcb692dd5d837b7606e968334ab7ff0090ab"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a53d8e35313d7b67eb3db15a66c08434809107659226a90dcd7acb2afa55faea"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bea9be712b8f5b4ebed40e1949379cfb2a7d907f42921cf9ab3aae07e6fba9eb"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2270504d629a0b064247983cbc495bed277f372fb9eaba41e5cf51f7ba705a6a"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80258bb3b8909b1700610dfabef7876423eed1bc930fe177c71c414921898efa"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:653230dd00aaf449eb5ff25d10a6e03bc3006813e2cb99799e568f55482e5cae"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b3acc6c4e6928459ba9eb7459dd4f0c4bf266a053c863d72a44c33246bfdbf"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6b3dadc9522d0eccc060699a9816e8127b27addbb4697fc0c08611e4e6aeb8b5"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1c8eca1a47b4150dc0fbec7fe68fc91c695aed798532a18dbb1424e61e9b721f"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:62ce2ae46303ee896fc6811f63d6dabf8d9c389da0f3e3f2bce8bc7f15ef5488"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b2027dde79d217b211d725fc833e8965dc90a16d0d3213f1298f97465956661b"}, - {file = "ruff-0.1.14-py3-none-win32.whl", hash = "sha256:722bafc299145575a63bbd6b5069cb643eaa62546a5b6398f82b3e4403329cab"}, - {file = "ruff-0.1.14-py3-none-win_amd64.whl", hash = "sha256:e3d241aa61f92b0805a7082bd89a9990826448e4d0398f0e2bc8f05c75c63d99"}, - {file = "ruff-0.1.14-py3-none-win_arm64.whl", hash = "sha256:269302b31ade4cde6cf6f9dd58ea593773a37ed3f7b97e793c8594b262466b67"}, - {file = "ruff-0.1.14.tar.gz", hash = "sha256:ad3f8088b2dfd884820289a06ab718cde7d38b94972212cc4ba90d5fbc9955f3"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = 
"sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, ] [[package]] @@ -1156,17 +1130,17 @@ files = [ [[package]] name = "syrupy" -version = "4.6.0" +version = "4.6.1" description = "Pytest Snapshot Test Utility" optional = false python-versions = ">=3.8.1,<4" files = [ - {file = "syrupy-4.6.0-py3-none-any.whl", hash = "sha256:747aae1bcf3cb3249e33b1e6d81097874d23615982d5686ebe637875b0775a1b"}, - {file = "syrupy-4.6.0.tar.gz", hash = "sha256:231b1f5d00f1f85048ba81676c79448076189c4aef4d33f21ae32f3b4c565a54"}, + {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, + {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, ] [package.dependencies] -pytest = ">=7.0.0,<8.0.0" +pytest = ">=7.0.0,<9.0.0" [[package]] name = "tenacity" @@ -1195,13 +1169,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.1" +version = "4.66.2" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, - {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, ] [package.dependencies] @@ -1226,65 +1200,68 @@ files = [ [[package]] name = "tzdata" -version = "2023.4" +version = "2024.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" files = [ - {file = "tzdata-2023.4-py2.py3-none-any.whl", hash = "sha256:aa3ace4329eeacda5b7beb7ea08ece826c28d761cda36e747cfbf97996d39bf3"}, - {file = "tzdata-2023.4.tar.gz", hash = 
"sha256:dd54c94f294765522c77399649b4fefd95522479a664a0cec87f41bebc6148c9"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = 
"watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] @@ -1307,4 +1284,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "93c6f3f4eef4a25d709cf06bcfed145c886b6cfeda1661c103da7cc1429c842f" +content-hash = "5bf9a68cffe3d006de6a5b5b7ea293e1d9893baa28390608f51d4103f639ee99" diff --git a/libs/partners/nomic/pyproject.toml b/libs/partners/nomic/pyproject.toml index e517f54fa1f57..ba16d28f7baae 100644 --- a/libs/partners/nomic/pyproject.toml +++ b/libs/partners/nomic/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain-nomic" -version = "0.0.1" +version = "0.0.2" description = "An integration package connecting Nomic and LangChain" authors = [] readme = "README.md" @@ -12,8 +12,8 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -langchain-core = ">=0.0.12" -nomic = "^3.0.7" +langchain-core = "^0.1" +nomic = "^3.0.12" [tool.poetry.group.test] optional = true diff --git a/libs/partners/nomic/tests/integration_tests/test_embeddings.py b/libs/partners/nomic/tests/integration_tests/test_embeddings.py index 6800deb839f38..9fcc7ed9de075 100644 --- 
a/libs/partners/nomic/tests/integration_tests/test_embeddings.py +++ b/libs/partners/nomic/tests/integration_tests/test_embeddings.py @@ -1,9 +1,10 @@ """Test Nomic embeddings.""" + from langchain_nomic.embeddings import NomicEmbeddings def test_langchain_nomic_embedding_documents() -> None: - """Test cohere embeddings.""" + """Test nomic embeddings.""" documents = ["foo bar"] embedding = NomicEmbeddings(model="nomic-embed-text-v1") output = embedding.embed_documents(documents) @@ -12,8 +13,17 @@ def test_langchain_nomic_embedding_documents() -> None: def test_langchain_nomic_embedding_query() -> None: - """Test cohere embeddings.""" + """Test nomic embeddings.""" document = "foo bar" embedding = NomicEmbeddings(model="nomic-embed-text-v1") output = embedding.embed_query(document) assert len(output) > 0 + + +def test_langchain_nomic_embedding_dimensionality() -> None: + """Test nomic embeddings.""" + documents = ["foo bar"] + embedding = NomicEmbeddings(model="nomic-embed-text-v1.5", dimensionality=256) + output = embedding.embed_documents(documents) + assert len(output) == 1 + assert len(output[0]) == 256 diff --git a/libs/partners/nvidia-ai-endpoints/poetry.lock b/libs/partners/nvidia-ai-endpoints/poetry.lock index 9210309249ed5..15b120b983b14 100644 --- a/libs/partners/nvidia-ai-endpoints/poetry.lock +++ b/libs/partners/nvidia-ai-endpoints/poetry.lock @@ -477,7 +477,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.1.22" +version = "0.1.23" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -1068,13 +1068,13 @@ files = [ [[package]] name = "types-pillow" -version = "10.2.0.20240206" +version = "10.2.0.20240213" description = "Typing stubs for Pillow" optional = false python-versions = ">=3.8" files = [ - {file = "types-Pillow-10.2.0.20240206.tar.gz", hash = "sha256:f0de5107ff8362ffdbbd53ec896202ac905e6ab22ae784b46bcdad160ea143b9"}, - {file = "types_Pillow-10.2.0.20240206-py3-none-any.whl", hash = "sha256:abc339ae28af5916146a7729261480d68ac902cd4ff57e0bdd402eee7962644d"}, + {file = "types-Pillow-10.2.0.20240213.tar.gz", hash = "sha256:4800b61bf7eabdae2f1b17ade0d080709ed33e9f26a2e900e470e8b56ebe2387"}, + {file = "types_Pillow-10.2.0.20240213-py3-none-any.whl", hash = "sha256:062c5a0f20301a30f2df4db583f15b3c2a1283a12518d1f9d81396154e12c1af"}, ] [[package]] @@ -1266,4 +1266,4 @@ multidict = ">=4.0" [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "ab0b10dcff485da25cd4a2b8de147038237af3920362cc8c749f58409e91c9f8" +content-hash = "d8ea86c4553606cd3ee8068adb0c0eddbc9d845d32a3cf76f8301a12e2d20380" diff --git a/libs/partners/nvidia-ai-endpoints/pyproject.toml b/libs/partners/nvidia-ai-endpoints/pyproject.toml index 41f3bf23c29e6..fa3ac23e19c88 100644 --- a/libs/partners/nvidia-ai-endpoints/pyproject.toml +++ b/libs/partners/nvidia-ai-endpoints/pyproject.toml @@ -12,7 +12,7 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -langchain-core = "^0.1.0" +langchain-core = "^0.1" aiohttp = "^3.9.1" [tool.poetry.group.test] diff --git a/libs/partners/nvidia-trt/poetry.lock b/libs/partners/nvidia-trt/poetry.lock index 975cafd40344d..d6f5f03812671 100644 --- a/libs/partners/nvidia-trt/poetry.lock +++ b/libs/partners/nvidia-trt/poetry.lock @@ -16,19 +16,20 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} [[package]] name = "anyio" -version = "4.1.0" +version = "4.2.0" description = "High level compatibility layer for multiple 
asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.1.0-py3-none-any.whl", hash = "sha256:56a415fbc462291813a94528a779597226619c8e78af7de0507333f700011e5f"}, - {file = "anyio-4.1.0.tar.gz", hash = "sha256:5a0bec7085176715be77df87fc66d6c9d70626bd752fcc85f57cdbee5b3760da"}, + {file = "anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee"}, + {file = "anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] @@ -37,13 +38,13 @@ trio = ["trio (>=0.23)"] [[package]] name = "astroid" -version = "3.0.2" +version = "3.0.3" description = "An abstract syntax tree for Python with inference support." optional = false python-versions = ">=3.8.0" files = [ - {file = "astroid-3.0.2-py3-none-any.whl", hash = "sha256:d6e62862355f60e716164082d6b4b041d38e2a8cf1c7cd953ded5108bac8ff5c"}, - {file = "astroid-3.0.2.tar.gz", hash = "sha256:4a61cf0a59097c7bb52689b0fd63717cd2a8a14dc9f1eee97b82d814881c8c91"}, + {file = "astroid-3.0.3-py3-none-any.whl", hash = "sha256:92fcf218b89f449cdf9f7b39a269f8d5d617b27be68434912e11e79203963a17"}, + {file = "astroid-3.0.3.tar.gz", hash = "sha256:4148645659b08b70d72460ed1921158027a9e53ae8b7234149b1400eddacbb93"}, ] [package.dependencies] @@ -51,13 +52,13 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -189,17 +190,18 @@ files = [ [[package]] name = "dill" -version = "0.3.7" +version = "0.3.8" description = "serialize all of Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, + {file = "dill-0.3.8-py3-none-any.whl", hash = "sha256:c36ca9ffb54365bdd2f8eb3eff7d2a21237f8452b57ace88b1ac615b7e815bd7"}, + {file = "dill-0.3.8.tar.gz", hash = "sha256:3ebe3c479ad625c4553aca177444d89b486b1d84982eeacded644afc0cf797ca"}, ] [package.extras] graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] [[package]] name = "exceptiongroup" @@ -217,13 +219,13 @@ test = ["pytest (>=6)"] [[package]] name = "freezegun" -version = "1.3.1" +version = "1.4.0" description = "Let your Python tests travel through time" optional = false python-versions = ">=3.7" files = [ - {file = "freezegun-1.3.1-py3-none-any.whl", hash = "sha256:065e77a12624d05531afa87ade12a0b9bdb53495c4573893252a055b545ce3ea"}, - {file = "freezegun-1.3.1.tar.gz", hash = "sha256:48984397b3b58ef5dfc645d6a304b0060f612bcecfdaaf45ce8aff0077a6cb6a"}, + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, ] [package.dependencies] @@ -245,86 +247,86 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.40" +version = "3.1.41" description = "GitPython is a Python library used to interact with Git repositories" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.40-py3-none-any.whl", hash = "sha256:cf14627d5a8049ffbf49915732e5eddbe8134c3bdb9d476e6182b676fc573f8a"}, - {file = "GitPython-3.1.40.tar.gz", hash = "sha256:22b126e9ffb671fdd0c129796343a02bf67bf2994b35449ffc9321aa755e18a4"}, + {file = "GitPython-3.1.41-py3-none-any.whl", hash = "sha256:c36b6634d069b3f719610175020a9aed919421c87552185b085e04fbbdb10b7c"}, + {file = "GitPython-3.1.41.tar.gz", hash = "sha256:ed66e624884f76df22c8e16066d567aaa5a37d5b5fa19db2c6df6f7156db9048"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" [package.extras] -test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest", "pytest-cov", "pytest-instafail", "pytest-subtests", "pytest-sugar"] +test = ["black", "coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "sumtypes"] [[package]] name = "grpcio" -version = "1.60.0" +version = "1.60.1" description = "HTTP/2-based RPC framework" optional = false python-versions = ">=3.7" files = [ - {file = "grpcio-1.60.0-cp310-cp310-linux_armv7l.whl", hash = 
"sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139"}, - {file = "grpcio-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43"}, - {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae"}, - {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508"}, - {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b"}, - {file = "grpcio-1.60.0-cp310-cp310-win32.whl", hash = "sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d"}, - {file = "grpcio-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df"}, - {file = "grpcio-1.60.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd"}, - {file = "grpcio-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134"}, - {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253"}, - {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444"}, - {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d"}, - {file = "grpcio-1.60.0-cp311-cp311-win32.whl", hash = "sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320"}, - {file = "grpcio-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b"}, - {file = "grpcio-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18"}, - {file = "grpcio-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b"}, - {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55"}, - {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca"}, - {file = 
"grpcio-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5"}, - {file = "grpcio-1.60.0-cp312-cp312-win32.whl", hash = "sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951"}, - {file = "grpcio-1.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a"}, - {file = "grpcio-1.60.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415"}, - {file = "grpcio-1.60.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454"}, - {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6"}, - {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619"}, - {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179"}, - {file = "grpcio-1.60.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b"}, - {file = "grpcio-1.60.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e"}, - {file = "grpcio-1.60.0-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19"}, - {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0"}, - {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390"}, - {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629"}, - {file = "grpcio-1.60.0-cp38-cp38-win32.whl", hash = "sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860"}, - {file = "grpcio-1.60.0-cp38-cp38-win_amd64.whl", hash = "sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08"}, - {file = "grpcio-1.60.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968"}, - {file = "grpcio-1.60.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2"}, - {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab"}, - {file = 
"grpcio-1.60.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328"}, - {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf"}, - {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6"}, - {file = "grpcio-1.60.0-cp39-cp39-win32.whl", hash = "sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03"}, - {file = "grpcio-1.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353"}, - {file = "grpcio-1.60.0.tar.gz", hash = "sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96"}, + {file = "grpcio-1.60.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:14e8f2c84c0832773fb3958240c69def72357bc11392571f87b2d7b91e0bb092"}, + {file = "grpcio-1.60.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:33aed0a431f5befeffd9d346b0fa44b2c01aa4aeae5ea5b2c03d3e25e0071216"}, + {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:fead980fbc68512dfd4e0c7b1f5754c2a8e5015a04dea454b9cada54a8423525"}, + {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:082081e6a36b6eb5cf0fd9a897fe777dbb3802176ffd08e3ec6567edd85bc104"}, + {file = "grpcio-1.60.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55ccb7db5a665079d68b5c7c86359ebd5ebf31a19bc1a91c982fd622f1e31ff2"}, + {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b54577032d4f235452f77a83169b6527bf4b77d73aeada97d45b2aaf1bf5ce0"}, + {file = "grpcio-1.60.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7d142bcd604166417929b071cd396aa13c565749a4c840d6c702727a59d835eb"}, + {file = "grpcio-1.60.1-cp310-cp310-win32.whl", hash = "sha256:2a6087f234cb570008a6041c8ffd1b7d657b397fdd6d26e83d72283dae3527b1"}, + {file = "grpcio-1.60.1-cp310-cp310-win_amd64.whl", hash = "sha256:f2212796593ad1d0235068c79836861f2201fc7137a99aa2fea7beeb3b101177"}, + {file = "grpcio-1.60.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:79ae0dc785504cb1e1788758c588c711f4e4a0195d70dff53db203c95a0bd303"}, + {file = "grpcio-1.60.1-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4eec8b8c1c2c9b7125508ff7c89d5701bf933c99d3910e446ed531cd16ad5d87"}, + {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:8c9554ca8e26241dabe7951aa1fa03a1ba0856688ecd7e7bdbdd286ebc272e4c"}, + {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91422ba785a8e7a18725b1dc40fbd88f08a5bb4c7f1b3e8739cab24b04fa8a03"}, + {file = "grpcio-1.60.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cba6209c96828711cb7c8fcb45ecef8c8859238baf15119daa1bef0f6c84bfe7"}, + {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c71be3f86d67d8d1311c6076a4ba3b75ba5703c0b856b4e691c9097f9b1e8bd2"}, + {file = "grpcio-1.60.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:af5ef6cfaf0d023c00002ba25d0751e5995fa0e4c9eec6cd263c30352662cbce"}, + {file = "grpcio-1.60.1-cp311-cp311-win32.whl", hash = "sha256:a09506eb48fa5493c58f946c46754ef22f3ec0df64f2b5149373ff31fb67f3dd"}, + {file = "grpcio-1.60.1-cp311-cp311-win_amd64.whl", hash = "sha256:49c9b6a510e3ed8df5f6f4f3c34d7fbf2d2cae048ee90a45cd7415abab72912c"}, + {file = "grpcio-1.60.1-cp312-cp312-linux_armv7l.whl", hash = 
"sha256:b58b855d0071575ea9c7bc0d84a06d2edfbfccec52e9657864386381a7ce1ae9"}, + {file = "grpcio-1.60.1-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:a731ac5cffc34dac62053e0da90f0c0b8560396a19f69d9703e88240c8f05858"}, + {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:cf77f8cf2a651fbd869fbdcb4a1931464189cd210abc4cfad357f1cacc8642a6"}, + {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c557e94e91a983e5b1e9c60076a8fd79fea1e7e06848eb2e48d0ccfb30f6e073"}, + {file = "grpcio-1.60.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:069fe2aeee02dfd2135d562d0663fe70fbb69d5eed6eb3389042a7e963b54de8"}, + {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb0af13433dbbd1c806e671d81ec75bd324af6ef75171fd7815ca3074fe32bfe"}, + {file = "grpcio-1.60.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2f44c32aef186bbba254129cea1df08a20be414144ac3bdf0e84b24e3f3b2e05"}, + {file = "grpcio-1.60.1-cp312-cp312-win32.whl", hash = "sha256:a212e5dea1a4182e40cd3e4067ee46be9d10418092ce3627475e995cca95de21"}, + {file = "grpcio-1.60.1-cp312-cp312-win_amd64.whl", hash = "sha256:6e490fa5f7f5326222cb9f0b78f207a2b218a14edf39602e083d5f617354306f"}, + {file = "grpcio-1.60.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:4216e67ad9a4769117433814956031cb300f85edc855252a645a9a724b3b6594"}, + {file = "grpcio-1.60.1-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:73e14acd3d4247169955fae8fb103a2b900cfad21d0c35f0dcd0fdd54cd60367"}, + {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:6ecf21d20d02d1733e9c820fb5c114c749d888704a7ec824b545c12e78734d1c"}, + {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33bdea30dcfd4f87b045d404388469eb48a48c33a6195a043d116ed1b9a0196c"}, + {file = "grpcio-1.60.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53b69e79d00f78c81eecfb38f4516080dc7f36a198b6b37b928f1c13b3c063e9"}, + {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:39aa848794b887120b1d35b1b994e445cc028ff602ef267f87c38122c1add50d"}, + {file = "grpcio-1.60.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:72153a0d2e425f45b884540a61c6639436ddafa1829a42056aa5764b84108b8e"}, + {file = "grpcio-1.60.1-cp37-cp37m-win_amd64.whl", hash = "sha256:50d56280b482875d1f9128ce596e59031a226a8b84bec88cb2bf76c289f5d0de"}, + {file = "grpcio-1.60.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:6d140bdeb26cad8b93c1455fa00573c05592793c32053d6e0016ce05ba267549"}, + {file = "grpcio-1.60.1-cp38-cp38-macosx_10_10_universal2.whl", hash = "sha256:bc808924470643b82b14fe121923c30ec211d8c693e747eba8a7414bc4351a23"}, + {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:70c83bb530572917be20c21f3b6be92cd86b9aecb44b0c18b1d3b2cc3ae47df0"}, + {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b106bc52e7f28170e624ba61cc7dc6829566e535a6ec68528f8e1afbed1c41f"}, + {file = "grpcio-1.60.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30e980cd6db1088c144b92fe376747328d5554bc7960ce583ec7b7d81cd47287"}, + {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0c5807e9152eff15f1d48f6b9ad3749196f79a4a050469d99eecb679be592acc"}, + {file = "grpcio-1.60.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1c3dc536b3ee124e8b24feb7533e5c70b9f2ef833e3b2e5513b2897fd46763a"}, + {file = 
"grpcio-1.60.1-cp38-cp38-win32.whl", hash = "sha256:d7404cebcdb11bb5bd40bf94131faf7e9a7c10a6c60358580fe83913f360f929"}, + {file = "grpcio-1.60.1-cp38-cp38-win_amd64.whl", hash = "sha256:c8754c75f55781515a3005063d9a05878b2cfb3cb7e41d5401ad0cf19de14872"}, + {file = "grpcio-1.60.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:0250a7a70b14000fa311de04b169cc7480be6c1a769b190769d347939d3232a8"}, + {file = "grpcio-1.60.1-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:660fc6b9c2a9ea3bb2a7e64ba878c98339abaf1811edca904ac85e9e662f1d73"}, + {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:76eaaba891083fcbe167aa0f03363311a9f12da975b025d30e94b93ac7a765fc"}, + {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5d97c65ea7e097056f3d1ead77040ebc236feaf7f71489383d20f3b4c28412a"}, + {file = "grpcio-1.60.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb2a2911b028f01c8c64d126f6b632fcd8a9ac975aa1b3855766c94e4107180"}, + {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5a1ebbae7e2214f51b1f23b57bf98eeed2cf1ba84e4d523c48c36d5b2f8829ff"}, + {file = "grpcio-1.60.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9a66f4d2a005bc78e61d805ed95dedfcb35efa84b7bba0403c6d60d13a3de2d6"}, + {file = "grpcio-1.60.1-cp39-cp39-win32.whl", hash = "sha256:8d488fbdbf04283f0d20742b64968d44825617aa6717b07c006168ed16488804"}, + {file = "grpcio-1.60.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b7199cd2a55e62e45bfb629a35b71fc2c0cb88f686a047f25b1112d3810904"}, + {file = "grpcio-1.60.1.tar.gz", hash = "sha256:dd1d3a8d1d2e50ad9b59e10aa7f07c7d1be2b367f3f2d33c5fade96ed5460962"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.60.0)"] +protobuf = ["grpcio-tools (>=1.60.1)"] [[package]] name = "idna" @@ -389,7 +391,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.1.17" +version = "0.1.23" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -399,7 +401,7 @@ develop = true [package.dependencies] anyio = ">=3,<5" jsonpatch = "^1.33" -langsmith = ">=0.0.83,<0.1" +langsmith = "^0.0.87" packaging = "^23.2" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -415,13 +417,13 @@ url = "../../core" [[package]] name = "langsmith" -version = "0.0.84" +version = "0.0.87" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.84-py3-none-any.whl", hash = "sha256:9ae1ab777018e2174f68e8f53c88e7a7feb8dbf1c458b473644a3d5e22dc1eb7"}, - {file = "langsmith-0.0.84.tar.gz", hash = "sha256:dd163f89bca14c86759c651a72917c6d45f7dd18435d7bc65dc205a23dd9ec8d"}, + {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, + {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, ] [package.dependencies] @@ -564,28 +566,28 @@ files = [ [[package]] name = "platformdirs" -version = "4.1.0" +version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, + {file = "platformdirs-4.2.0-py3-none-any.whl", hash = "sha256:0614df2a2f37e1a662acbd8e2b25b92ccf8632929bc6d43467e17fe89c75e068"}, + {file = "platformdirs-4.2.0.tar.gz", hash = "sha256:ef0cc731df711022c174543cb70a9b5bd22e5a9337c8624ef2c2ceb8ddad8768"}, ] [package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -625,18 +627,18 @@ files = [ [[package]] name = "pydantic" -version = "2.5.2" +version = "2.6.1" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, - {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.5" +pydantic-core = "2.16.2" typing-extensions = ">=4.6.1" [package.extras] @@ -644,116 +646,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.5" +version = "2.16.2" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, - {file = 
"pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, - {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, - {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, - {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, - {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, - {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, - {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, - {file = "pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, - {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, - {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, - {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, - {file = 
"pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, - {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, - {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, - {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, - {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, - {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, - {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, - {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, - {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, - {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, - {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, - {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, - {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, - {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, - {file = 
"pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, - {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file 
= "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -791,13 +767,13 @@ testutils = ["gitpython (>3)"] [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -848,13 +824,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-watcher" -version = "0.3.4" +version = "0.3.5" description = "Automatically rerun your tests on file modifications" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "pytest_watcher-0.3.4-py3-none-any.whl", hash = "sha256:edd2bd9c8a1fb14d48c9f4947234065eb9b4c1acedc0bf213b1f12501dfcffd3"}, - {file = "pytest_watcher-0.3.4.tar.gz", hash = "sha256:d39491ba15b589221bb9a78ef4bed3d5d1503aed08209b1a138aeb95b9117a18"}, + {file = "pytest_watcher-0.3.5-py3-none-any.whl", hash = "sha256:af00ca52c7be22dc34c0fd3d7ffef99057207a73b05dc5161fe3b2fe91f58130"}, + {file = "pytest_watcher-0.3.5.tar.gz", hash = "sha256:8896152460ba2b1a8200c12117c6611008ec96c8b2d811f0a05ab8a82b043ff8"}, ] [package.dependencies] @@ -1028,28 +1004,28 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.1.8" +version = "0.1.15" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.8-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7de792582f6e490ae6aef36a58d85df9f7a0cfd1b0d4fe6b4fb51803a3ac96fa"}, - {file = "ruff-0.1.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c8e3255afd186c142eef4ec400d7826134f028a85da2146102a1172ecc7c3696"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff78a7583020da124dd0deb835ece1d87bb91762d40c514ee9b67a087940528b"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd8ee69b02e7bdefe1e5da2d5b6eaaddcf4f90859f00281b2333c0e3a0cc9cd6"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a05b0ddd7ea25495e4115a43125e8a7ebed0aa043c3d432de7e7d6e8e8cd6448"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e6f08ca730f4dc1b76b473bdf30b1b37d42da379202a059eae54ec7fc1fbcfed"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f35960b02df6b827c1b903091bb14f4b003f6cf102705efc4ce78132a0aa5af3"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d076717c67b34c162da7c1a5bda16ffc205e0e0072c03745275e7eab888719f"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6a21ab023124eafb7cef6d038f835cb1155cd5ea798edd8d9eb2f8b84be07d9"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ce697c463458555027dfb194cb96d26608abab920fa85213deb5edf26e026664"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:db6cedd9ffed55548ab313ad718bc34582d394e27a7875b4b952c2d29c001b26"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:05ffe9dbd278965271252704eddb97b4384bf58b971054d517decfbf8c523f05"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5daaeaf00ae3c1efec9742ff294b06c3a2a9db8d3db51ee4851c12ad385cda30"}, - {file = "ruff-0.1.8-py3-none-win32.whl", hash = "sha256:e49fbdfe257fa41e5c9e13c79b9e79a23a79bd0e40b9314bc53840f520c2c0b3"}, - {file = "ruff-0.1.8-py3-none-win_amd64.whl", hash = "sha256:f41f692f1691ad87f51708b823af4bb2c5c87c9248ddd3191c8f088e66ce590a"}, - {file = "ruff-0.1.8-py3-none-win_arm64.whl", hash = "sha256:aa8ee4f8440023b0a6c3707f76cadce8657553655dcbb5fc9b2f9bb9bee389f6"}, - {file = "ruff-0.1.8.tar.gz", hash = "sha256:f7ee467677467526cfe135eab86a40a0e8db43117936ac4f9b469ce9cdb3fb62"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + 
{file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, ] [[package]] @@ -1087,17 +1063,17 @@ files = [ [[package]] name = "syrupy" -version = "4.6.0" +version = "4.6.1" description = "Pytest Snapshot Test Utility" optional = false python-versions = ">=3.8.1,<4" files = [ - {file = "syrupy-4.6.0-py3-none-any.whl", hash = "sha256:747aae1bcf3cb3249e33b1e6d81097874d23615982d5686ebe637875b0775a1b"}, - {file = "syrupy-4.6.0.tar.gz", hash = "sha256:231b1f5d00f1f85048ba81676c79448076189c4aef4d33f21ae32f3b4c565a54"}, + {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, + {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, ] [package.dependencies] -pytest = ">=7.0.0,<8.0.0" +pytest = ">=7.0.0,<9.0.0" [[package]] name = "tenacity" @@ -1163,13 +1139,13 @@ http = ["aiohttp (>=3.8.1,<4.0.0)", "geventhttpclient (>=1.4.4,<=2.0.2)", "numpy [[package]] name = "types-protobuf" -version = "4.24.0.4" +version = "4.24.0.20240129" description = "Typing stubs for protobuf" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "types-protobuf-4.24.0.4.tar.gz", hash = "sha256:57ab42cb171dfdba2c74bb5b50c250478538cc3c5ed95b8b368929ad0c9f90a5"}, - {file = "types_protobuf-4.24.0.4-py3-none-any.whl", hash = "sha256:131ab7d0cbc9e444bc89c994141327dcce7bcaeded72b1acb72a94827eb9c7af"}, + {file = "types-protobuf-4.24.0.20240129.tar.gz", hash = "sha256:8a83dd3b9b76a33e08d8636c5daa212ace1396418ed91837635fcd564a624891"}, + {file = "types_protobuf-4.24.0.20240129-py3-none-any.whl", hash = "sha256:23be68cc29f3f5213b5c5878ac0151706182874040e220cfb11336f9ee642ead"}, ] [[package]] @@ -1185,54 +1161,57 @@ files = [ [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file 
= "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] @@ -1241,4 +1220,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "14f75d1cbe5c1d54ee8ead4d4db1c43394691d8e166d6b78cb6c15fcfc156e5c" +content-hash = "0e732a2a4dbf0fcdd6dfb5d3764f630df29bb57c06f487c4d4f32dfeb2d11660" diff --git a/libs/partners/nvidia-trt/pyproject.toml b/libs/partners/nvidia-trt/pyproject.toml index 83f9bebb4a0ed..9846299c7da55 100644 --- a/libs/partners/nvidia-trt/pyproject.toml +++ b/libs/partners/nvidia-trt/pyproject.toml @@ -12,8 +12,8 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -langchain-core = ">=0.0.12" -tritonclient = {extras = ["grpc"], version = "^2.42.0"} +langchain-core = "^0.1" +tritonclient = { extras = ["grpc"], version = "^2.42.0" } lint = "^1.2.1" types-protobuf = "^4.24.0.4" protobuf = "^3.5.0" @@ -59,9 +59,9 @@ langchain-core = { path = "../../core", develop = true } [tool.ruff.lint] select = [ - "E", # pycodestyle - "F", # pyflakes - "I", # isort + "E", # pycodestyle + "F", # pyflakes + "I", # isort "T201", # print ] diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py index fc1430e24250e..57e265a6ef4f8 100644 --- a/libs/partners/openai/langchain_openai/chat_models/base.py +++ b/libs/partners/openai/langchain_openai/chat_models/base.py @@ -1,10 +1,10 @@ """OpenAI chat wrapper.""" + from __future__ import annotations import logging import os import sys -import warnings from typing import ( Any, AsyncIterator, @@ -64,6 +64,7 @@ convert_to_openai_function, convert_to_openai_tool, ) +from 
langchain_core.utils.utils import build_extra_kwargs logger = logging.getLogger(__name__) @@ -290,25 +291,9 @@ def build_extra(cls, values: Dict[str, Any]) -> Dict[str, Any]: """Build extra kwargs from additional params that were passed in.""" all_required_field_names = get_pydantic_field_names(cls) extra = values.get("model_kwargs", {}) - for field_name in list(values): - if field_name in extra: - raise ValueError(f"Found {field_name} supplied twice.") - if field_name not in all_required_field_names: - warnings.warn( - f"""WARNING! {field_name} is not default parameter. - {field_name} was transferred to model_kwargs. - Please confirm that {field_name} is what you intended.""" - ) - extra[field_name] = values.pop(field_name) - - invalid_model_kwargs = all_required_field_names.intersection(extra.keys()) - if invalid_model_kwargs: - raise ValueError( - f"Parameters {invalid_model_kwargs} should be specified explicitly. " - f"Instead they were passed in as part of `model_kwargs` parameter." - ) - - values["model_kwargs"] = extra + values["model_kwargs"] = build_extra_kwargs( + extra, values, all_required_field_names + ) return values @root_validator() @@ -339,9 +324,11 @@ def validate_environment(cls, values: Dict) -> Dict: ) client_params = { - "api_key": values["openai_api_key"].get_secret_value() - if values["openai_api_key"] - else None, + "api_key": ( + values["openai_api_key"].get_secret_value() + if values["openai_api_key"] + else None + ), "organization": values["openai_organization"], "base_url": values["openai_api_base"], "timeout": values["request_timeout"], diff --git a/libs/partners/openai/langchain_openai/llms/base.py b/libs/partners/openai/langchain_openai/llms/base.py index 9b0ba87d1f46f..a298b048d0e8f 100644 --- a/libs/partners/openai/langchain_openai/llms/base.py +++ b/libs/partners/openai/langchain_openai/llms/base.py @@ -198,9 +198,11 @@ def validate_environment(cls, values: Dict) -> Dict: ) client_params = { - "api_key": values["openai_api_key"].get_secret_value() - if values["openai_api_key"] - else None, + "api_key": ( + values["openai_api_key"].get_secret_value() + if values["openai_api_key"] + else None + ), "organization": values["openai_organization"], "base_url": values["openai_api_base"], "timeout": values["request_timeout"], @@ -257,9 +259,11 @@ def _stream( chunk.text, chunk=chunk, verbose=self.verbose, - logprobs=chunk.generation_info["logprobs"] - if chunk.generation_info - else None, + logprobs=( + chunk.generation_info["logprobs"] + if chunk.generation_info + else None + ), ) async def _astream( @@ -283,9 +287,11 @@ async def _astream( chunk.text, chunk=chunk, verbose=self.verbose, - logprobs=chunk.generation_info["logprobs"] - if chunk.generation_info - else None, + logprobs=( + chunk.generation_info["logprobs"] + if chunk.generation_info + else None + ), ) def _generate( @@ -334,12 +340,16 @@ def _generate( choices.append( { "text": generation.text, - "finish_reason": generation.generation_info.get("finish_reason") - if generation.generation_info - else None, - "logprobs": generation.generation_info.get("logprobs") - if generation.generation_info - else None, + "finish_reason": ( + generation.generation_info.get("finish_reason") + if generation.generation_info + else None + ), + "logprobs": ( + generation.generation_info.get("logprobs") + if generation.generation_info + else None + ), } ) else: @@ -395,12 +405,16 @@ async def _agenerate( choices.append( { "text": generation.text, - "finish_reason": generation.generation_info.get("finish_reason") - if 
generation.generation_info - else None, - "logprobs": generation.generation_info.get("logprobs") - if generation.generation_info - else None, + "finish_reason": ( + generation.generation_info.get("finish_reason") + if generation.generation_info + else None + ), + "logprobs": ( + generation.generation_info.get("logprobs") + if generation.generation_info + else None + ), } ) else: diff --git a/libs/partners/openai/poetry.lock b/libs/partners/openai/poetry.lock index ee9e59a1249f9..c8e76949df170 100644 --- a/libs/partners/openai/poetry.lock +++ b/libs/partners/openai/poetry.lock @@ -38,13 +38,13 @@ trio = ["trio (>=0.23)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -318,7 +318,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.1.16" +version = "0.1.23" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -328,7 +328,7 @@ develop = true [package.dependencies] anyio = ">=3,<5" jsonpatch = "^1.33" -langsmith = ">=0.0.83,<0.1" +langsmith = "^0.0.87" packaging = "^23.2" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -344,13 +344,13 @@ url = "../../core" [[package]] name = "langsmith" -version = "0.0.83" +version = "0.0.87" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.83-py3-none-any.whl", hash = "sha256:a5bb7ac58c19a415a9d5f51db56dd32ee2cd7343a00825bbc2018312eb3d122a"}, - {file = "langsmith-0.0.83.tar.gz", hash = "sha256:94427846b334ad9bdbec3266fee12903fe9f5448f628667689d0412012aaf392"}, + {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, + {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, ] [package.dependencies] @@ -457,13 +457,13 @@ files = [ [[package]] name = "openai" -version = "1.10.0" +version = "1.12.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.10.0-py3-none-any.whl", hash = "sha256:aa69e97d0223ace9835fbf9c997abe9ee95318f684fd2de6d02c870700c71ebc"}, - {file = "openai-1.10.0.tar.gz", hash = "sha256:208886cb501b930dc63f48d51db9c15e5380380f80516d07332adad67c9f1053"}, + {file = "openai-1.12.0-py3-none-any.whl", hash = "sha256:a54002c814e05222e413664f651b5916714e4700d041d5cf5724d3ae1a3e3481"}, + {file = "openai-1.12.0.tar.gz", hash = "sha256:99c5d257d09ea6533d689d1cc77caa0ac679fa21efef8893d8b0832a86877f1b"}, ] [package.dependencies] @@ -491,13 +491,13 @@ files = [ [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -506,18 +506,18 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "2.5.3" +version = "2.6.1" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, - {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.6" +pydantic-core = "2.16.2" typing-extensions = ">=4.6.1" [package.extras] @@ -525,116 +525,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.6" +version = "2.16.2" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, - {file = 
"pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, - {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, - {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, - {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, - {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = 
"sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, - {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, - {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, - {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, - {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, - {file = 
"pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, - {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, - {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, - {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, - {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, - {file = 
"pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, - {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, - {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = 
"sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, - {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = 
"pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = 
"pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + 
{file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + 
{file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -699,13 +673,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-watcher" -version = "0.3.4" +version = "0.3.5" description = "Automatically rerun your tests on file modifications" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "pytest_watcher-0.3.4-py3-none-any.whl", hash = "sha256:edd2bd9c8a1fb14d48c9f4947234065eb9b4c1acedc0bf213b1f12501dfcffd3"}, - {file = "pytest_watcher-0.3.4.tar.gz", hash = "sha256:d39491ba15b589221bb9a78ef4bed3d5d1503aed08209b1a138aeb95b9117a18"}, + {file = "pytest_watcher-0.3.5-py3-none-any.whl", hash = "sha256:af00ca52c7be22dc34c0fd3d7ffef99057207a73b05dc5161fe3b2fe91f58130"}, + {file = "pytest_watcher-0.3.5.tar.gz", hash = "sha256:8896152460ba2b1a8200c12117c6611008ec96c8b2d811f0a05ab8a82b043ff8"}, ] [package.dependencies] @@ -751,6 +725,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -910,28 +885,28 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.1.9" +version = "0.1.15" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.9-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e6a212f436122ac73df851f0cf006e0c6612fe6f9c864ed17ebefce0eff6a5fd"}, - {file = "ruff-0.1.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:28d920e319783d5303333630dae46ecc80b7ba294aeffedf946a02ac0b7cc3db"}, - {file = "ruff-0.1.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:104aa9b5e12cb755d9dce698ab1b97726b83012487af415a4512fedd38b1459e"}, - {file = "ruff-0.1.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e63bf5a4a91971082a4768a0aba9383c12392d0d6f1e2be2248c1f9054a20da"}, - {file = "ruff-0.1.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d0738917c203246f3e275b37006faa3aa96c828b284ebfe3e99a8cb413c8c4b"}, - {file = "ruff-0.1.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:69dac82d63a50df2ab0906d97a01549f814b16bc806deeac4f064ff95c47ddf5"}, - {file = "ruff-0.1.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2aec598fb65084e41a9c5d4b95726173768a62055aafb07b4eff976bac72a592"}, - {file = "ruff-0.1.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:744dfe4b35470fa3820d5fe45758aace6269c578f7ddc43d447868cfe5078bcb"}, - {file = "ruff-0.1.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:479ca4250cab30f9218b2e563adc362bd6ae6343df7c7b5a7865300a5156d5a6"}, - {file = "ruff-0.1.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:aa8344310f1ae79af9ccd6e4b32749e93cddc078f9b5ccd0e45bd76a6d2e8bb6"}, - {file = "ruff-0.1.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:837c739729394df98f342319f5136f33c65286b28b6b70a87c28f59354ec939b"}, - {file = "ruff-0.1.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e6837202c2859b9f22e43cb01992373c2dbfeae5c0c91ad691a4a2e725392464"}, - {file = "ruff-0.1.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:331aae2cd4a0554667ac683243b151c74bd60e78fb08c3c2a4ac05ee1e606a39"}, - {file = "ruff-0.1.9-py3-none-win32.whl", hash = "sha256:8151425a60878e66f23ad47da39265fc2fad42aed06fb0a01130e967a7a064f4"}, - {file = "ruff-0.1.9-py3-none-win_amd64.whl", hash = "sha256:c497d769164df522fdaf54c6eba93f397342fe4ca2123a2e014a5b8fc7df81c7"}, - {file = "ruff-0.1.9-py3-none-win_arm64.whl", hash = "sha256:0e17f53bcbb4fff8292dfd84cf72d767b5e146f009cccd40c2fad27641f8a7a9"}, - {file = "ruff-0.1.9.tar.gz", hash = "sha256:b041dee2734719ddbb4518f762c982f2e912e7f28b8ee4fe1dee0b15d1b6e800"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + 
{file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, ] [[package]] @@ -958,17 +933,17 @@ files = [ [[package]] name = "syrupy" -version = "4.6.0" +version = "4.6.1" description = "Pytest Snapshot Test Utility" optional = false python-versions = ">=3.8.1,<4" files = [ - {file = "syrupy-4.6.0-py3-none-any.whl", hash = "sha256:747aae1bcf3cb3249e33b1e6d81097874d23615982d5686ebe637875b0775a1b"}, - {file = "syrupy-4.6.0.tar.gz", hash = "sha256:231b1f5d00f1f85048ba81676c79448076189c4aef4d33f21ae32f3b4c565a54"}, + {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, + {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, ] [package.dependencies] -pytest = ">=7.0.0,<8.0.0" +pytest = ">=7.0.0,<9.0.0" [[package]] name = "tenacity" @@ -986,47 +961,47 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] [[package]] name = "tiktoken" -version = "0.5.2" +version = "0.6.0" description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" optional = false python-versions = ">=3.8" files = [ - {file = "tiktoken-0.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c4e654282ef05ec1bd06ead22141a9a1687991cef2c6a81bdd1284301abc71d"}, - {file = "tiktoken-0.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7b3134aa24319f42c27718c6967f3c1916a38a715a0fa73d33717ba121231307"}, - {file = "tiktoken-0.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6092e6e77730929c8c6a51bb0d7cfdf1b72b63c4d033d6258d1f2ee81052e9e5"}, - {file = "tiktoken-0.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ad8ae2a747622efae75837abba59be6c15a8f31b4ac3c6156bc56ec7a8e631"}, - {file = "tiktoken-0.5.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51cba7c8711afa0b885445f0637f0fcc366740798c40b981f08c5f984e02c9d1"}, - {file = "tiktoken-0.5.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:3d8c7d2c9313f8e92e987d585ee2ba0f7c40a0de84f4805b093b634f792124f5"}, - {file = "tiktoken-0.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:692eca18c5fd8d1e0dde767f895c17686faaa102f37640e884eecb6854e7cca7"}, - {file = "tiktoken-0.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:138d173abbf1ec75863ad68ca289d4da30caa3245f3c8d4bfb274c4d629a2f77"}, - {file = "tiktoken-0.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7388fdd684690973fdc450b47dfd24d7f0cbe658f58a576169baef5ae4658607"}, - {file = "tiktoken-0.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a114391790113bcff670c70c24e166a841f7ea8f47ee2fe0e71e08b49d0bf2d4"}, - {file = "tiktoken-0.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca96f001e69f6859dd52926d950cfcc610480e920e576183497ab954e645e6ac"}, - {file = "tiktoken-0.5.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:15fed1dd88e30dfadcdd8e53a8927f04e1f6f81ad08a5ca824858a593ab476c7"}, - {file = "tiktoken-0.5.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:93f8e692db5756f7ea8cb0cfca34638316dcf0841fb8469de8ed7f6a015ba0b0"}, - {file = "tiktoken-0.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:bcae1c4c92df2ffc4fe9f475bf8148dbb0ee2404743168bbeb9dcc4b79dc1fdd"}, - {file = "tiktoken-0.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b76a1e17d4eb4357d00f0622d9a48ffbb23401dcf36f9716d9bd9c8e79d421aa"}, - {file = "tiktoken-0.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:01d8b171bb5df4035580bc26d4f5339a6fd58d06f069091899d4a798ea279d3e"}, - {file = "tiktoken-0.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42adf7d4fb1ed8de6e0ff2e794a6a15005f056a0d83d22d1d6755a39bffd9e7f"}, - {file = "tiktoken-0.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3f894dbe0adb44609f3d532b8ea10820d61fdcb288b325a458dfc60fefb7db"}, - {file = "tiktoken-0.5.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58ccfddb4e62f0df974e8f7e34a667981d9bb553a811256e617731bf1d007d19"}, - {file = "tiktoken-0.5.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58902a8bad2de4268c2a701f1c844d22bfa3cbcc485b10e8e3e28a050179330b"}, - {file = "tiktoken-0.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:5e39257826d0647fcac403d8fa0a474b30d02ec8ffc012cfaf13083e9b5e82c5"}, - {file = "tiktoken-0.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bde3b0fbf09a23072d39c1ede0e0821f759b4fa254a5f00078909158e90ae1f"}, - {file = "tiktoken-0.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2ddee082dcf1231ccf3a591d234935e6acf3e82ee28521fe99af9630bc8d2a60"}, - {file = "tiktoken-0.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35c057a6a4e777b5966a7540481a75a31429fc1cb4c9da87b71c8b75b5143037"}, - {file = "tiktoken-0.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c4a049b87e28f1dc60509f8eb7790bc8d11f9a70d99b9dd18dfdd81a084ffe6"}, - {file = "tiktoken-0.5.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5bf5ce759089f4f6521ea6ed89d8f988f7b396e9f4afb503b945f5c949c6bec2"}, - {file = "tiktoken-0.5.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0c964f554af1a96884e01188f480dad3fc224c4bbcf7af75d4b74c4b74ae0125"}, - {file = "tiktoken-0.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:368dd5726d2e8788e47ea04f32e20f72a2012a8a67af5b0b003d1e059f1d30a3"}, - {file = "tiktoken-0.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2deef9115b8cd55536c0a02c0203512f8deb2447f41585e6d929a0b878a0dd2"}, - {file = 
"tiktoken-0.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2ed7d380195affbf886e2f8b92b14edfe13f4768ff5fc8de315adba5b773815e"}, - {file = "tiktoken-0.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76fce01309c8140ffe15eb34ded2bb94789614b7d1d09e206838fc173776a18"}, - {file = "tiktoken-0.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60a5654d6a2e2d152637dd9a880b4482267dfc8a86ccf3ab1cec31a8c76bfae8"}, - {file = "tiktoken-0.5.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:41d4d3228e051b779245a8ddd21d4336f8975563e92375662f42d05a19bdff41"}, - {file = "tiktoken-0.5.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c1cdec2c92fcde8c17a50814b525ae6a88e8e5b02030dc120b76e11db93f13"}, - {file = "tiktoken-0.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:84ddb36faedb448a50b246e13d1b6ee3437f60b7169b723a4b2abad75e914f3e"}, - {file = "tiktoken-0.5.2.tar.gz", hash = "sha256:f54c581f134a8ea96ce2023ab221d4d4d81ab614efa0b2fbce926387deb56c80"}, + {file = "tiktoken-0.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:277de84ccd8fa12730a6b4067456e5cf72fef6300bea61d506c09e45658d41ac"}, + {file = "tiktoken-0.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c44433f658064463650d61387623735641dcc4b6c999ca30bc0f8ba3fccaf5c"}, + {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afb9a2a866ae6eef1995ab656744287a5ac95acc7e0491c33fad54d053288ad3"}, + {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c62c05b3109fefca26fedb2820452a050074ad8e5ad9803f4652977778177d9f"}, + {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ef917fad0bccda07bfbad835525bbed5f3ab97a8a3e66526e48cdc3e7beacf7"}, + {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e095131ab6092d0769a2fda85aa260c7c383072daec599ba9d8b149d2a3f4d8b"}, + {file = "tiktoken-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:05b344c61779f815038292a19a0c6eb7098b63c8f865ff205abb9ea1b656030e"}, + {file = "tiktoken-0.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cefb9870fb55dca9e450e54dbf61f904aab9180ff6fe568b61f4db9564e78871"}, + {file = "tiktoken-0.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:702950d33d8cabc039845674107d2e6dcabbbb0990ef350f640661368df481bb"}, + {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8d49d076058f23254f2aff9af603863c5c5f9ab095bc896bceed04f8f0b013a"}, + {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:430bc4e650a2d23a789dc2cdca3b9e5e7eb3cd3935168d97d43518cbb1f9a911"}, + {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:293cb8669757301a3019a12d6770bd55bec38a4d3ee9978ddbe599d68976aca7"}, + {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7bd1a288b7903aadc054b0e16ea78e3171f70b670e7372432298c686ebf9dd47"}, + {file = "tiktoken-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac76e000183e3b749634968a45c7169b351e99936ef46f0d2353cd0d46c3118d"}, + {file = "tiktoken-0.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17cc8a4a3245ab7d935c83a2db6bb71619099d7284b884f4b2aea4c74f2f83e3"}, + {file = "tiktoken-0.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:284aebcccffe1bba0d6571651317df6a5b376ff6cfed5aeb800c55df44c78177"}, + {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0c1a3a5d33846f8cd9dd3b7897c1d45722f48625a587f8e6f3d3e85080559be8"}, + {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6318b2bb2337f38ee954fd5efa82632c6e5ced1d52a671370fa4b2eff1355e91"}, + {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f5f0f2ed67ba16373f9a6013b68da298096b27cd4e1cf276d2d3868b5c7efd1"}, + {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:75af4c0b16609c2ad02581f3cdcd1fb698c7565091370bf6c0cf8624ffaba6dc"}, + {file = "tiktoken-0.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:45577faf9a9d383b8fd683e313cf6df88b6076c034f0a16da243bb1c139340c3"}, + {file = "tiktoken-0.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7c1492ab90c21ca4d11cef3a236ee31a3e279bb21b3fc5b0e2210588c4209e68"}, + {file = "tiktoken-0.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e2b380c5b7751272015400b26144a2bab4066ebb8daae9c3cd2a92c3b508fe5a"}, + {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f497598b9f58c99cbc0eb764b4a92272c14d5203fc713dd650b896a03a50ad"}, + {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e65e8bd6f3f279d80f1e1fbd5f588f036b9a5fa27690b7f0cc07021f1dfa0839"}, + {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5f1495450a54e564d236769d25bfefbf77727e232d7a8a378f97acddee08c1ae"}, + {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6c4e4857d99f6fb4670e928250835b21b68c59250520a1941618b5b4194e20c3"}, + {file = "tiktoken-0.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:168d718f07a39b013032741867e789971346df8e89983fe3c0ef3fbd5a0b1cb9"}, + {file = "tiktoken-0.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:47fdcfe11bd55376785a6aea8ad1db967db7f66ea81aed5c43fad497521819a4"}, + {file = "tiktoken-0.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb7d2ccbf1a7784810aff6b80b4012fb42c6fc37eaa68cb3b553801a5cc2d1fc"}, + {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ccb7a111ee76af5d876a729a347f8747d5ad548e1487eeea90eaf58894b3138"}, + {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2048e1086b48e3c8c6e2ceeac866561374cd57a84622fa49a6b245ffecb7744"}, + {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07f229a5eb250b6403a61200199cecf0aac4aa23c3ecc1c11c1ca002cbb8f159"}, + {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:432aa3be8436177b0db5a2b3e7cc28fd6c693f783b2f8722539ba16a867d0c6a"}, + {file = "tiktoken-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8bfe8a19c8b5c40d121ee7938cd9c6a278e5b97dc035fd61714b4f0399d2f7a1"}, + {file = "tiktoken-0.6.0.tar.gz", hash = "sha256:ace62a4ede83c75b0374a2ddfa4b76903cf483e9cb06247f566be3bf14e6beed"}, ] [package.dependencies] @@ -1049,13 +1024,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.1" +version = "4.66.2" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, - {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = 
"sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, ] [package.dependencies] @@ -1069,13 +1044,13 @@ telegram = ["requests"] [[package]] name = "types-tqdm" -version = "4.66.0.5" +version = "4.66.0.20240106" description = "Typing stubs for tqdm" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "types-tqdm-4.66.0.5.tar.gz", hash = "sha256:74bd7e469238c28816300f72a9b713d02036f6b557734616430adb7b7e74112c"}, - {file = "types_tqdm-4.66.0.5-py3-none-any.whl", hash = "sha256:d2c38085bec440e8ad1e94e8619f7cb3d1dd0a7ee06a863ccd0610a5945046ef"}, + {file = "types-tqdm-4.66.0.20240106.tar.gz", hash = "sha256:7acf4aade5bad3ded76eb829783f9961b1c2187948eaa6dd1ae8644dff95a938"}, + {file = "types_tqdm-4.66.0.20240106-py3-none-any.whl", hash = "sha256:7459b0f441b969735685645a5d8480f7912b10d05ab45f99a2db8a8e45cb550b"}, ] [[package]] @@ -1091,54 +1066,57 @@ files = [ [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = 
"watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] @@ -1147,4 +1125,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "689f74ee7854ade754369fd7b42f70a60ec167ee68161825b2e128324afbd90b" +content-hash = "b4e8b107da054bb20fbdac062c95897c92df0100d0d387b47719adb93b55355a" diff --git a/libs/partners/openai/pyproject.toml b/libs/partners/openai/pyproject.toml index 74d68cf487710..2308a97cf67af 100644 --- a/libs/partners/openai/pyproject.toml +++ b/libs/partners/openai/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = 
"langchain-openai" -version = "0.0.5" +version = "0.0.6" description = "An integration package connecting OpenAI and LangChain" authors = [] readme = "README.md" @@ -12,10 +12,10 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -langchain-core = ">=0.1.16,<0.2" +langchain-core = "^0.1.16" openai = "^1.10.0" numpy = "^1" -tiktoken = "^0.5.2" +tiktoken = ">=0.5.2,<1" [tool.poetry.group.test] optional = true @@ -23,11 +23,11 @@ optional = true [tool.poetry.group.test.dependencies] pytest = "^7.3.0" freezegun = "^1.2.2" -pytest-mock = "^3.10.0" +pytest-mock = "^3.10.0" syrupy = "^4.0.2" pytest-watcher = "^0.3.4" pytest-asyncio = "^0.21.1" -langchain-core = {path = "../../core", develop = true} +langchain-core = { path = "../../core", develop = true } [tool.poetry.group.codespell] optional = true @@ -48,20 +48,20 @@ ruff = "^0.1.5" [tool.poetry.group.typing.dependencies] mypy = "^0.991" -langchain-core = {path = "../../core", develop = true} +langchain-core = { path = "../../core", develop = true } types-tqdm = "^4.66.0.5" [tool.poetry.group.dev] optional = true [tool.poetry.group.dev.dependencies] -langchain-core = {path = "../../core", develop = true} +langchain-core = { path = "../../core", develop = true } [tool.ruff.lint] select = [ - "E", # pycodestyle - "F", # pyflakes - "I", # isort + "E", # pycodestyle + "F", # pyflakes + "I", # isort "T201", # print ] @@ -73,9 +73,7 @@ module = "transformers" ignore_missing_imports = true [tool.coverage.run] -omit = [ - "tests/*", -] +omit = ["tests/*"] [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/libs/partners/pinecone/langchain_pinecone/_utilities.py b/libs/partners/pinecone/langchain_pinecone/_utilities.py index 5f5f734cf6d90..37d61e9fb111c 100644 --- a/libs/partners/pinecone/langchain_pinecone/_utilities.py +++ b/libs/partners/pinecone/langchain_pinecone/_utilities.py @@ -2,7 +2,6 @@ from typing import List, Union import numpy as np -import simsimd # type: ignore Matrix = Union[List[List[float]], List[np.ndarray], np.ndarray] @@ -62,10 +61,20 @@ def cosine_similarity(X: Matrix, Y: Matrix) -> np.ndarray: f"Number of columns in X and Y must be the same. X has shape {X.shape} " f"and Y has shape {Y.shape}." ) + try: + import simsimd as simd # type: ignore - X = np.array(X, dtype=np.float32) - Y = np.array(Y, dtype=np.float32) - Z = 1 - simsimd.cdist(X, Y, metric="cosine") - if isinstance(Z, float): - return np.array([Z]) - return Z + X = np.array(X, dtype=np.float32) + Y = np.array(Y, dtype=np.float32) + Z = 1 - simd.cdist(X, Y, metric="cosine") + if isinstance(Z, float): + return np.array([Z]) + return Z + except ImportError: + X_norm = np.linalg.norm(X, axis=1) + Y_norm = np.linalg.norm(Y, axis=1) + # Ignore divide by zero errors run time warnings as those are handled below. + with np.errstate(divide="ignore", invalid="ignore"): + similarity = np.dot(X, Y.T) / np.outer(X_norm, Y_norm) + similarity[np.isnan(similarity) | np.isinf(similarity)] = 0.0 + return similarity diff --git a/libs/partners/pinecone/poetry.lock b/libs/partners/pinecone/poetry.lock index d82c3dcefcebf..14d48ec651d55 100644 --- a/libs/partners/pinecone/poetry.lock +++ b/libs/partners/pinecone/poetry.lock @@ -38,13 +38,13 @@ trio = ["trio (>=0.23)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -318,7 +318,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.1.17" +version = "0.1.23" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -328,7 +328,7 @@ develop = true [package.dependencies] anyio = ">=3,<5" jsonpatch = "^1.33" -langsmith = ">=0.0.83,<0.1" +langsmith = "^0.0.87" packaging = "^23.2" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -344,30 +344,30 @@ url = "../../core" [[package]] name = "langchain-openai" -version = "0.0.3" +version = "0.0.6" description = "An integration package connecting OpenAI and LangChain" optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langchain_openai-0.0.3-py3-none-any.whl", hash = "sha256:32d8ae288e212ed47af418ffd216c8af3b8115514bb39127ca9e2910c06fc6b2"}, - {file = "langchain_openai-0.0.3.tar.gz", hash = "sha256:19720510abcd7d6217a47d551def7779dc001aebbf978bda5c03e0a8c8167ac3"}, + {file = "langchain_openai-0.0.6-py3-none-any.whl", hash = "sha256:2ef040e4447a26a9d3bd45dfac9cefa00797ea58555a3d91ab4f88699eb3a005"}, + {file = "langchain_openai-0.0.6.tar.gz", hash = "sha256:f5c4ebe46f2c8635c8f0c26cc8df27700aacafea025410e418d5a080039974dd"}, ] [package.dependencies] -langchain-core = ">=0.1.13,<0.2" +langchain-core = ">=0.1.16,<0.2" numpy = ">=1,<2" -openai = ">=1.6.1,<2.0.0" -tiktoken = ">=0.5.2,<0.6.0" +openai = ">=1.10.0,<2.0.0" +tiktoken = ">=0.5.2,<1" [[package]] name = "langsmith" -version = "0.0.83" +version = "0.0.87" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.83-py3-none-any.whl", hash = "sha256:a5bb7ac58c19a415a9d5f51db56dd32ee2cd7343a00825bbc2018312eb3d122a"}, - {file = "langsmith-0.0.83.tar.gz", hash = "sha256:94427846b334ad9bdbec3266fee12903fe9f5448f628667689d0412012aaf392"}, + {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, + {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, ] [package.dependencies] @@ -474,13 +474,13 @@ files = [ [[package]] name = "openai" -version = "1.9.0" +version = "1.12.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.9.0-py3-none-any.whl", hash = "sha256:5774a0582ed82f6de92200ed5024e03e272b93e04e9d31caeda5fb80f63df50d"}, - {file = "openai-1.9.0.tar.gz", hash = "sha256:3e9947a544556c051fa138a4def5bd8b468364ec52803c6628532ab949ddce55"}, + {file = "openai-1.12.0-py3-none-any.whl", hash = "sha256:a54002c814e05222e413664f651b5916714e4700d041d5cf5724d3ae1a3e3481"}, + {file = "openai-1.12.0.tar.gz", hash = "sha256:99c5d257d09ea6533d689d1cc77caa0ac679fa21efef8893d8b0832a86877f1b"}, ] [package.dependencies] @@ -508,13 +508,13 @@ files = [ [[package]] name = "pinecone-client" -version = "3.0.1" +version = "3.0.2" description = "Pinecone client and SDK" optional = false python-versions = ">=3.8,<3.13" files = [ - {file = "pinecone_client-3.0.1-py3-none-any.whl", hash = "sha256:c9bb21c23a9088c6198c839be5538ed3f733d152d5fbeaafcc020c1b70b62c2d"}, - {file = "pinecone_client-3.0.1.tar.gz", hash = "sha256:626a0055852c88f1462fc2e132f21d2b078f9a0a74c70b17fe07df3081c6615f"}, + {file = "pinecone_client-3.0.2-py3-none-any.whl", hash = "sha256:72696c883b47c0f65808bf623aebe940c07bc396f2126b627aad63d6e3cb6c43"}, + {file = "pinecone_client-3.0.2.tar.gz", hash = "sha256:f9a0830333eece107b4ef1119de23dad6a61bffab7f238e618416d51c46d29c8"}, ] [package.dependencies] @@ -528,13 +528,13 @@ grpc = ["googleapis-common-protos (>=1.53.0)", "grpc-gateway-protoc-gen-openapiv [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -543,18 +543,18 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "2.5.3" +version = "2.6.1" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, - {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = 
"sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.6" +pydantic-core = "2.16.2" typing-extensions = ">=4.6.1" [package.extras] @@ -562,116 +562,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.6" +version = "2.16.2" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, - {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, - {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, - {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, - {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, - {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, - {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, - {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, - {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, - {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, - {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, - {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:667aa2eac9cd0700af1ddb38b7b1ef246d8cf94c85637cbb03d7757ca4c3fdec"}, - {file = "pydantic_core-2.14.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cdee837710ef6b56ebd20245b83799fce40b265b3b406e51e8ccc5b85b9099b7"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c5bcf3414367e29f83fd66f7de64509a8fd2368b1edf4351e862910727d3e51"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:26a92ae76f75d1915806b77cf459811e772d8f71fd1e4339c99750f0e7f6324f"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a983cca5ed1dd9a35e9e42ebf9f278d344603bfcb174ff99a5815f953925140a"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cb92f9061657287eded380d7dc455bbf115430b3aa4741bdc662d02977e7d0af"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ace1e220b078c8e48e82c081e35002038657e4b37d403ce940fa679e57113b"}, - {file = "pydantic_core-2.14.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef633add81832f4b56d3b4c9408b43d530dfca29e68fb1b797dcb861a2c734cd"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7e90d6cc4aad2cc1f5e16ed56e46cebf4877c62403a311af20459c15da76fd91"}, - {file = "pydantic_core-2.14.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e8a5ac97ea521d7bde7621d86c30e86b798cdecd985723c4ed737a2aa9e77d0c"}, - {file = "pydantic_core-2.14.6-cp312-none-win32.whl", hash = "sha256:f27207e8ca3e5e021e2402ba942e5b4c629718e665c81b8b306f3c8b1ddbb786"}, - {file = "pydantic_core-2.14.6-cp312-none-win_amd64.whl", hash = "sha256:b3e5fe4538001bb82e2295b8d2a39356a84694c97cb73a566dc36328b9f83b40"}, - {file = "pydantic_core-2.14.6-cp312-none-win_arm64.whl", hash = "sha256:64634ccf9d671c6be242a664a33c4acf12882670b09b3f163cd00a24cffbd74e"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:24368e31be2c88bd69340fbfe741b405302993242ccb476c5c3ff48aeee1afe0"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:e33b0834f1cf779aa839975f9d8755a7c2420510c0fa1e9fa0497de77cd35d2c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6af4b3f52cc65f8a0bc8b1cd9676f8c21ef3e9132f21fed250f6958bd7223bed"}, - {file = 
"pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d15687d7d7f40333bd8266f3814c591c2e2cd263fa2116e314f60d82086e353a"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:095b707bb287bfd534044166ab767bec70a9bba3175dcdc3371782175c14e43c"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94fc0e6621e07d1e91c44e016cc0b189b48db053061cc22d6298a611de8071bb"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ce830e480f6774608dedfd4a90c42aac4a7af0a711f1b52f807130c2e434c06"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a306cdd2ad3a7d795d8e617a58c3a2ed0f76c8496fb7621b6cd514eb1532cae8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2f5fa187bde8524b1e37ba894db13aadd64faa884657473b03a019f625cee9a8"}, - {file = "pydantic_core-2.14.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:438027a975cc213a47c5d70672e0d29776082155cfae540c4e225716586be75e"}, - {file = "pydantic_core-2.14.6-cp37-none-win32.whl", hash = "sha256:f96ae96a060a8072ceff4cfde89d261837b4294a4f28b84a28765470d502ccc6"}, - {file = "pydantic_core-2.14.6-cp37-none-win_amd64.whl", hash = "sha256:e646c0e282e960345314f42f2cea5e0b5f56938c093541ea6dbf11aec2862391"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:db453f2da3f59a348f514cfbfeb042393b68720787bbef2b4c6068ea362c8149"}, - {file = "pydantic_core-2.14.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3860c62057acd95cc84044e758e47b18dcd8871a328ebc8ccdefd18b0d26a21b"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36026d8f99c58d7044413e1b819a67ca0e0b8ebe0f25e775e6c3d1fabb3c38fb"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8ed1af8692bd8d2a29d702f1a2e6065416d76897d726e45a1775b1444f5928a7"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:314ccc4264ce7d854941231cf71b592e30d8d368a71e50197c905874feacc8a8"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:982487f8931067a32e72d40ab6b47b1628a9c5d344be7f1a4e668fb462d2da42"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dbe357bc4ddda078f79d2a36fc1dd0494a7f2fad83a0a684465b6f24b46fe80"}, - {file = "pydantic_core-2.14.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2f6ffc6701a0eb28648c845f4945a194dc7ab3c651f535b81793251e1185ac3d"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7f5025db12fc6de7bc1104d826d5aee1d172f9ba6ca936bf6474c2148ac336c1"}, - {file = "pydantic_core-2.14.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dab03ed811ed1c71d700ed08bde8431cf429bbe59e423394f0f4055f1ca0ea60"}, - {file = "pydantic_core-2.14.6-cp38-none-win32.whl", hash = "sha256:dfcbebdb3c4b6f739a91769aea5ed615023f3c88cb70df812849aef634c25fbe"}, - {file = "pydantic_core-2.14.6-cp38-none-win_amd64.whl", hash = "sha256:99b14dbea2fdb563d8b5a57c9badfcd72083f6006caf8e126b491519c7d64ca8"}, - {file = "pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, - {file = 
"pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, - {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, - {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, - {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, - {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, - {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, - {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, - {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, - {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, - {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = 
"pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = 
"pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = 
"pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -736,13 +710,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-watcher" -version = "0.3.4" +version = "0.3.5" description = "Automatically rerun your tests on file modifications" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "pytest_watcher-0.3.4-py3-none-any.whl", hash = "sha256:edd2bd9c8a1fb14d48c9f4947234065eb9b4c1acedc0bf213b1f12501dfcffd3"}, - {file = "pytest_watcher-0.3.4.tar.gz", hash = "sha256:d39491ba15b589221bb9a78ef4bed3d5d1503aed08209b1a138aeb95b9117a18"}, + {file = "pytest_watcher-0.3.5-py3-none-any.whl", hash = "sha256:af00ca52c7be22dc34c0fd3d7ffef99057207a73b05dc5161fe3b2fe91f58130"}, + {file = "pytest_watcher-0.3.5.tar.gz", hash = "sha256:8896152460ba2b1a8200c12117c6611008ec96c8b2d811f0a05ab8a82b043ff8"}, ] [package.dependencies] @@ -948,64 +922,28 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.1.14" +version = "0.1.15" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:96f76536df9b26622755c12ed8680f159817be2f725c17ed9305b472a757cdbb"}, - {file = "ruff-0.1.14-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab3f71f64498c7241123bb5a768544cf42821d2a537f894b22457a543d3ca7a9"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060156ecc572b8f984fd20fd8b0fcb692dd5d837b7606e968334ab7ff0090ab"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a53d8e35313d7b67eb3db15a66c08434809107659226a90dcd7acb2afa55faea"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bea9be712b8f5b4ebed40e1949379cfb2a7d907f42921cf9ab3aae07e6fba9eb"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:2270504d629a0b064247983cbc495bed277f372fb9eaba41e5cf51f7ba705a6a"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80258bb3b8909b1700610dfabef7876423eed1bc930fe177c71c414921898efa"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:653230dd00aaf449eb5ff25d10a6e03bc3006813e2cb99799e568f55482e5cae"}, - {file = "ruff-0.1.14-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b3acc6c4e6928459ba9eb7459dd4f0c4bf266a053c863d72a44c33246bfdbf"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6b3dadc9522d0eccc060699a9816e8127b27addbb4697fc0c08611e4e6aeb8b5"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1c8eca1a47b4150dc0fbec7fe68fc91c695aed798532a18dbb1424e61e9b721f"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_i686.whl", hash = "sha256:62ce2ae46303ee896fc6811f63d6dabf8d9c389da0f3e3f2bce8bc7f15ef5488"}, - {file = "ruff-0.1.14-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b2027dde79d217b211d725fc833e8965dc90a16d0d3213f1298f97465956661b"}, - {file = "ruff-0.1.14-py3-none-win32.whl", hash = "sha256:722bafc299145575a63bbd6b5069cb643eaa62546a5b6398f82b3e4403329cab"}, - {file = "ruff-0.1.14-py3-none-win_amd64.whl", hash = "sha256:e3d241aa61f92b0805a7082bd89a9990826448e4d0398f0e2bc8f05c75c63d99"}, - {file = "ruff-0.1.14-py3-none-win_arm64.whl", hash = "sha256:269302b31ade4cde6cf6f9dd58ea593773a37ed3f7b97e793c8594b262466b67"}, - {file = "ruff-0.1.14.tar.gz", hash = "sha256:ad3f8088b2dfd884820289a06ab718cde7d38b94972212cc4ba90d5fbc9955f3"}, -] - -[[package]] -name = "simsimd" -version = "3.6.3" -description = "Fastest SIMD-Accelerated Vector Similarity Functions for x86 and Arm" -optional = false -python-versions = "*" -files = [ - {file = "simsimd-3.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:107bce051ce8400873c12581502f904a1065f2c2c33b014d78b404a766790d25"}, - {file = "simsimd-3.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5418ab23eeda2ab5fe8bb51ee02013e9fa382ee595a7836c204117ed648ed6ff"}, - {file = "simsimd-3.6.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:5d97b5676936bb82db2a0d829c3da23d8c496d965a889f493bb1c34b8f06ed25"}, - {file = "simsimd-3.6.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:9dcfaa380dff710a74b252796475942a40d87b58917b3e48aa69e6a478e306f0"}, - {file = "simsimd-3.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:f7608db9712864f6405c9fa6475c8f56ca357d1b76d18e2b24186970312e5bbd"}, - {file = 
"simsimd-3.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81ac4252ef3f7c7a43b65041d590953cd4a3b62a94f582e0febcd6c830430f7b"}, - {file = "simsimd-3.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9bf67762caeceb2340ca73ffe3d2a386b8841288e76ad6964f6c87a6e0fad648"}, - {file = "simsimd-3.6.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:e3809a0d4430d8d5c9bae1c6d418f2356423ef497e891d71902fb759d9d9e1ef"}, - {file = "simsimd-3.6.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e31608e25c80406620174ade45a07fe0af67520352c0e8f16ca4b59488ea97cd"}, - {file = "simsimd-3.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:7400f2549a58744c0c61dd62532686fc40bee49efcbfb5ec163fd942a3d00c09"}, - {file = "simsimd-3.6.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:56dbc4726ce2a07ae2b9f9121784d60944d1f980d8eca086c6c2e152ecc3c389"}, - {file = "simsimd-3.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:07138cec69335960b202850f4c92419f2f9316bc4bf3995ba36935043c450527"}, - {file = "simsimd-3.6.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e2d2a7661d364928e7bdf45524e793df04b8f5114b112400310f9c84996b3537"}, - {file = "simsimd-3.6.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6dc612fcef38ca0e5d6dec26f9d4e254b2f86692344cb929195f8c3bc0178e9c"}, - {file = "simsimd-3.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:9dd142ce8d8e3191a6229cd69d571b32f3f9859cac5236013942a8d090e1e0ef"}, - {file = "simsimd-3.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:a6a34de7067fea77f60a8e88ff572ad017e664c58811c36107c62ad82d73cbad"}, - {file = "simsimd-3.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:f32724f8128220bb6d296dede90db5473ed1e0c51d62ebbac8e9d76faa1956ec"}, - {file = "simsimd-3.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:739d0c145d9f1127cec1206e20f57dfeb65e6f541614305ff3d247516a4f323e"}, - {file = "simsimd-3.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bf631d942985d2e60f2fcc81b3030f45e6e19c38d91d6c8d849aefcf5505723e"}, - {file = "simsimd-3.6.3-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0cb9932f8ca0a9a375baf593e4e6ec3ee4c91bf9ff4e7d6905efbc93954365df"}, - {file = "simsimd-3.6.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:882b5a9c900de57c33cfcb0dc5584ccd5f70cea87ad2ac5365b1548ede7e6dda"}, - {file = "simsimd-3.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:87b2761ac7b0264e4dfefdb4cf0a725cd0c349a6c480cf39f0cdb9741487f78b"}, - {file = "simsimd-3.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1efee420355204798eb44f6355f7447b76a3d1941357620cb2187170e6e397e6"}, - {file = "simsimd-3.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c54dfa652438f96ca0b75ca2d0ac4c241d5f4aa9a9d4da5a0b633ecee607cac9"}, - {file = "simsimd-3.6.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:a5170a2865391138cafa1aa5084082a450c0041fcd9efd8df8e82df82956491a"}, - {file = "simsimd-3.6.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:a738662bb8c45792fda624ba51d0e6a02fb3750b3339c18fbb31b7f44ac7d6d2"}, - {file = "simsimd-3.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:54306a63ffdea756f28c3085330462d739ed46c5a36d827473bbd711557c37d3"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, ] [[package]] @@ -1032,17 +970,17 @@ files = [ [[package]] name = "syrupy" -version = "4.6.0" +version = "4.6.1" description = "Pytest Snapshot Test Utility" optional = false python-versions = ">=3.8.1,<4" files = [ - {file = "syrupy-4.6.0-py3-none-any.whl", hash = "sha256:747aae1bcf3cb3249e33b1e6d81097874d23615982d5686ebe637875b0775a1b"}, - {file = "syrupy-4.6.0.tar.gz", hash = "sha256:231b1f5d00f1f85048ba81676c79448076189c4aef4d33f21ae32f3b4c565a54"}, + {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, + {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, ] [package.dependencies] -pytest = ">=7.0.0,<8.0.0" +pytest = ">=7.0.0,<9.0.0" [[package]] name = "tenacity" @@ -1060,47 +998,47 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] [[package]] name = "tiktoken" -version = "0.5.2" +version = "0.6.0" description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" optional = false python-versions = ">=3.8" files = [ - {file = "tiktoken-0.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c4e654282ef05ec1bd06ead22141a9a1687991cef2c6a81bdd1284301abc71d"}, - {file = "tiktoken-0.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7b3134aa24319f42c27718c6967f3c1916a38a715a0fa73d33717ba121231307"}, - {file = 
"tiktoken-0.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6092e6e77730929c8c6a51bb0d7cfdf1b72b63c4d033d6258d1f2ee81052e9e5"}, - {file = "tiktoken-0.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72ad8ae2a747622efae75837abba59be6c15a8f31b4ac3c6156bc56ec7a8e631"}, - {file = "tiktoken-0.5.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51cba7c8711afa0b885445f0637f0fcc366740798c40b981f08c5f984e02c9d1"}, - {file = "tiktoken-0.5.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3d8c7d2c9313f8e92e987d585ee2ba0f7c40a0de84f4805b093b634f792124f5"}, - {file = "tiktoken-0.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:692eca18c5fd8d1e0dde767f895c17686faaa102f37640e884eecb6854e7cca7"}, - {file = "tiktoken-0.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:138d173abbf1ec75863ad68ca289d4da30caa3245f3c8d4bfb274c4d629a2f77"}, - {file = "tiktoken-0.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7388fdd684690973fdc450b47dfd24d7f0cbe658f58a576169baef5ae4658607"}, - {file = "tiktoken-0.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a114391790113bcff670c70c24e166a841f7ea8f47ee2fe0e71e08b49d0bf2d4"}, - {file = "tiktoken-0.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca96f001e69f6859dd52926d950cfcc610480e920e576183497ab954e645e6ac"}, - {file = "tiktoken-0.5.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:15fed1dd88e30dfadcdd8e53a8927f04e1f6f81ad08a5ca824858a593ab476c7"}, - {file = "tiktoken-0.5.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:93f8e692db5756f7ea8cb0cfca34638316dcf0841fb8469de8ed7f6a015ba0b0"}, - {file = "tiktoken-0.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:bcae1c4c92df2ffc4fe9f475bf8148dbb0ee2404743168bbeb9dcc4b79dc1fdd"}, - {file = "tiktoken-0.5.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b76a1e17d4eb4357d00f0622d9a48ffbb23401dcf36f9716d9bd9c8e79d421aa"}, - {file = "tiktoken-0.5.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:01d8b171bb5df4035580bc26d4f5339a6fd58d06f069091899d4a798ea279d3e"}, - {file = "tiktoken-0.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42adf7d4fb1ed8de6e0ff2e794a6a15005f056a0d83d22d1d6755a39bffd9e7f"}, - {file = "tiktoken-0.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c3f894dbe0adb44609f3d532b8ea10820d61fdcb288b325a458dfc60fefb7db"}, - {file = "tiktoken-0.5.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:58ccfddb4e62f0df974e8f7e34a667981d9bb553a811256e617731bf1d007d19"}, - {file = "tiktoken-0.5.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58902a8bad2de4268c2a701f1c844d22bfa3cbcc485b10e8e3e28a050179330b"}, - {file = "tiktoken-0.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:5e39257826d0647fcac403d8fa0a474b30d02ec8ffc012cfaf13083e9b5e82c5"}, - {file = "tiktoken-0.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8bde3b0fbf09a23072d39c1ede0e0821f759b4fa254a5f00078909158e90ae1f"}, - {file = "tiktoken-0.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2ddee082dcf1231ccf3a591d234935e6acf3e82ee28521fe99af9630bc8d2a60"}, - {file = "tiktoken-0.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35c057a6a4e777b5966a7540481a75a31429fc1cb4c9da87b71c8b75b5143037"}, - {file = "tiktoken-0.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c4a049b87e28f1dc60509f8eb7790bc8d11f9a70d99b9dd18dfdd81a084ffe6"}, - {file = 
"tiktoken-0.5.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5bf5ce759089f4f6521ea6ed89d8f988f7b396e9f4afb503b945f5c949c6bec2"}, - {file = "tiktoken-0.5.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0c964f554af1a96884e01188f480dad3fc224c4bbcf7af75d4b74c4b74ae0125"}, - {file = "tiktoken-0.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:368dd5726d2e8788e47ea04f32e20f72a2012a8a67af5b0b003d1e059f1d30a3"}, - {file = "tiktoken-0.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a2deef9115b8cd55536c0a02c0203512f8deb2447f41585e6d929a0b878a0dd2"}, - {file = "tiktoken-0.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2ed7d380195affbf886e2f8b92b14edfe13f4768ff5fc8de315adba5b773815e"}, - {file = "tiktoken-0.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c76fce01309c8140ffe15eb34ded2bb94789614b7d1d09e206838fc173776a18"}, - {file = "tiktoken-0.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60a5654d6a2e2d152637dd9a880b4482267dfc8a86ccf3ab1cec31a8c76bfae8"}, - {file = "tiktoken-0.5.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:41d4d3228e051b779245a8ddd21d4336f8975563e92375662f42d05a19bdff41"}, - {file = "tiktoken-0.5.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c1cdec2c92fcde8c17a50814b525ae6a88e8e5b02030dc120b76e11db93f13"}, - {file = "tiktoken-0.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:84ddb36faedb448a50b246e13d1b6ee3437f60b7169b723a4b2abad75e914f3e"}, - {file = "tiktoken-0.5.2.tar.gz", hash = "sha256:f54c581f134a8ea96ce2023ab221d4d4d81ab614efa0b2fbce926387deb56c80"}, + {file = "tiktoken-0.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:277de84ccd8fa12730a6b4067456e5cf72fef6300bea61d506c09e45658d41ac"}, + {file = "tiktoken-0.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9c44433f658064463650d61387623735641dcc4b6c999ca30bc0f8ba3fccaf5c"}, + {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afb9a2a866ae6eef1995ab656744287a5ac95acc7e0491c33fad54d053288ad3"}, + {file = "tiktoken-0.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c62c05b3109fefca26fedb2820452a050074ad8e5ad9803f4652977778177d9f"}, + {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ef917fad0bccda07bfbad835525bbed5f3ab97a8a3e66526e48cdc3e7beacf7"}, + {file = "tiktoken-0.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e095131ab6092d0769a2fda85aa260c7c383072daec599ba9d8b149d2a3f4d8b"}, + {file = "tiktoken-0.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:05b344c61779f815038292a19a0c6eb7098b63c8f865ff205abb9ea1b656030e"}, + {file = "tiktoken-0.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cefb9870fb55dca9e450e54dbf61f904aab9180ff6fe568b61f4db9564e78871"}, + {file = "tiktoken-0.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:702950d33d8cabc039845674107d2e6dcabbbb0990ef350f640661368df481bb"}, + {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8d49d076058f23254f2aff9af603863c5c5f9ab095bc896bceed04f8f0b013a"}, + {file = "tiktoken-0.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:430bc4e650a2d23a789dc2cdca3b9e5e7eb3cd3935168d97d43518cbb1f9a911"}, + {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:293cb8669757301a3019a12d6770bd55bec38a4d3ee9978ddbe599d68976aca7"}, + {file = "tiktoken-0.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:7bd1a288b7903aadc054b0e16ea78e3171f70b670e7372432298c686ebf9dd47"}, + {file = "tiktoken-0.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac76e000183e3b749634968a45c7169b351e99936ef46f0d2353cd0d46c3118d"}, + {file = "tiktoken-0.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17cc8a4a3245ab7d935c83a2db6bb71619099d7284b884f4b2aea4c74f2f83e3"}, + {file = "tiktoken-0.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:284aebcccffe1bba0d6571651317df6a5b376ff6cfed5aeb800c55df44c78177"}, + {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c1a3a5d33846f8cd9dd3b7897c1d45722f48625a587f8e6f3d3e85080559be8"}, + {file = "tiktoken-0.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6318b2bb2337f38ee954fd5efa82632c6e5ced1d52a671370fa4b2eff1355e91"}, + {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f5f0f2ed67ba16373f9a6013b68da298096b27cd4e1cf276d2d3868b5c7efd1"}, + {file = "tiktoken-0.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:75af4c0b16609c2ad02581f3cdcd1fb698c7565091370bf6c0cf8624ffaba6dc"}, + {file = "tiktoken-0.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:45577faf9a9d383b8fd683e313cf6df88b6076c034f0a16da243bb1c139340c3"}, + {file = "tiktoken-0.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7c1492ab90c21ca4d11cef3a236ee31a3e279bb21b3fc5b0e2210588c4209e68"}, + {file = "tiktoken-0.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e2b380c5b7751272015400b26144a2bab4066ebb8daae9c3cd2a92c3b508fe5a"}, + {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f497598b9f58c99cbc0eb764b4a92272c14d5203fc713dd650b896a03a50ad"}, + {file = "tiktoken-0.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e65e8bd6f3f279d80f1e1fbd5f588f036b9a5fa27690b7f0cc07021f1dfa0839"}, + {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5f1495450a54e564d236769d25bfefbf77727e232d7a8a378f97acddee08c1ae"}, + {file = "tiktoken-0.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6c4e4857d99f6fb4670e928250835b21b68c59250520a1941618b5b4194e20c3"}, + {file = "tiktoken-0.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:168d718f07a39b013032741867e789971346df8e89983fe3c0ef3fbd5a0b1cb9"}, + {file = "tiktoken-0.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:47fdcfe11bd55376785a6aea8ad1db967db7f66ea81aed5c43fad497521819a4"}, + {file = "tiktoken-0.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb7d2ccbf1a7784810aff6b80b4012fb42c6fc37eaa68cb3b553801a5cc2d1fc"}, + {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ccb7a111ee76af5d876a729a347f8747d5ad548e1487eeea90eaf58894b3138"}, + {file = "tiktoken-0.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2048e1086b48e3c8c6e2ceeac866561374cd57a84622fa49a6b245ffecb7744"}, + {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07f229a5eb250b6403a61200199cecf0aac4aa23c3ecc1c11c1ca002cbb8f159"}, + {file = "tiktoken-0.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:432aa3be8436177b0db5a2b3e7cc28fd6c693f783b2f8722539ba16a867d0c6a"}, + {file = "tiktoken-0.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8bfe8a19c8b5c40d121ee7938cd9c6a278e5b97dc035fd61714b4f0399d2f7a1"}, + {file = "tiktoken-0.6.0.tar.gz", hash = "sha256:ace62a4ede83c75b0374a2ddfa4b76903cf483e9cb06247f566be3bf14e6beed"}, ] [package.dependencies] @@ -1123,13 +1061,13 @@ 
files = [ [[package]] name = "tqdm" -version = "4.66.1" +version = "4.66.2" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, - {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, ] [package.dependencies] @@ -1154,54 +1092,57 @@ files = [ [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = 
"watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] @@ -1209,5 +1150,5 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" -python-versions = ">=3.8.1,<4.0" -content-hash = "9b38ca11dda09f4e7c1456e73ca69585b017cfd811ad9e50653f58d5bc94cf90" +python-versions = ">=3.8.1,<3.13" +content-hash = "e12831055a5ebb19af506df742ac22c57c80684145e3a35e6ae82c848150c676" diff --git a/libs/partners/pinecone/pyproject.toml b/libs/partners/pinecone/pyproject.toml index 53d082d62b89f..83b435f3192bb 100644 --- a/libs/partners/pinecone/pyproject.toml +++ b/libs/partners/pinecone/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "langchain-pinecone" -version = "0.0.1" +version = "0.0.2" description = "An integration package connecting Pinecone and LangChain" authors = [] readme = "README.md" @@ -11,10 
+11,10 @@ license = "MIT" "Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/partners/pinecone" [tool.poetry.dependencies] -python = ">=3.8.1,<4.0" -langchain-core = ">=0.0.12" -pinecone-client = { version = "^3", python = ">=3.8,<3.13" } -simsimd = "^3.6.3" +# <3.13 is due to restriction in pinecone-client package +python = ">=3.8.1,<3.13" +langchain-core = "^0.1" +pinecone-client = { version = "^3" } numpy = "^1" [tool.poetry.group.test] diff --git a/libs/partners/robocorp/poetry.lock b/libs/partners/robocorp/poetry.lock index 5e35940466351..20b60ed600a6e 100644 --- a/libs/partners/robocorp/poetry.lock +++ b/libs/partners/robocorp/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "annotated-types" @@ -38,13 +38,13 @@ trio = ["trio (>=0.23)"] [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -246,12 +246,11 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" files = [ {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, - {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, ] [[package]] name = "langchain-core" -version = "0.1.17" +version = "0.1.23" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -261,7 +260,7 @@ develop = true [package.dependencies] anyio = ">=3,<5" jsonpatch = "^1.33" -langsmith = ">=0.0.83,<0.1" +langsmith = "^0.1.0 || ^0.0.87" packaging = "^23.2" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -277,13 +276,13 @@ url = "../../core" [[package]] name = "langsmith" -version = "0.0.84" +version = "0.0.87" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.84-py3-none-any.whl", hash = "sha256:9ae1ab777018e2174f68e8f53c88e7a7feb8dbf1c458b473644a3d5e22dc1eb7"}, - {file = "langsmith-0.0.84.tar.gz", hash = "sha256:dd163f89bca14c86759c651a72917c6d45f7dd18435d7bc65dc205a23dd9ec8d"}, + {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, + {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, ] [package.dependencies] @@ -379,18 +378,18 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "2.6.0" +version = "2.6.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.6.0-py3-none-any.whl", hash = "sha256:1440966574e1b5b99cf75a13bec7b20e3512e8a61b894ae252f56275e2c465ae"}, - {file = "pydantic-2.6.0.tar.gz", hash = "sha256:ae887bd94eb404b09d86e4d12f93893bdca79d766e738528c6fa1c849f3c6bcf"}, + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.16.1" +pydantic-core = "2.16.2" typing-extensions = ">=4.6.1" [package.extras] @@ -398,90 +397,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.16.1" +version = "2.16.2" description = "" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.16.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:300616102fb71241ff477a2cbbc847321dbec49428434a2f17f37528721c4948"}, - {file = "pydantic_core-2.16.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5511f962dd1b9b553e9534c3b9c6a4b0c9ded3d8c2be96e61d56f933feef9e1f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98f0edee7ee9cc7f9221af2e1b95bd02810e1c7a6d115cfd82698803d385b28f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9795f56aa6b2296f05ac79d8a424e94056730c0b860a62b0fdcfe6340b658cc8"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c45f62e4107ebd05166717ac58f6feb44471ed450d07fecd90e5f69d9bf03c48"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:462d599299c5971f03c676e2b63aa80fec5ebc572d89ce766cd11ca8bcb56f3f"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ebaa4bf6386a3b22eec518da7d679c8363fb7fb70cf6972161e5542f470798"}, - {file = "pydantic_core-2.16.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:99f9a50b56713a598d33bc23a9912224fc5d7f9f292444e6664236ae471ddf17"}, - {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8ec364e280db4235389b5e1e6ee924723c693cbc98e9d28dc1767041ff9bc388"}, - {file = "pydantic_core-2.16.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:653a5dfd00f601a0ed6654a8b877b18d65ac32c9d9997456e0ab240807be6cf7"}, - {file = "pydantic_core-2.16.1-cp310-none-win32.whl", hash = "sha256:1661c668c1bb67b7cec96914329d9ab66755911d093bb9063c4c8914188af6d4"}, - {file = "pydantic_core-2.16.1-cp310-none-win_amd64.whl", hash = 
"sha256:561be4e3e952c2f9056fba5267b99be4ec2afadc27261505d4992c50b33c513c"}, - {file = "pydantic_core-2.16.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:102569d371fadc40d8f8598a59379c37ec60164315884467052830b28cc4e9da"}, - {file = "pydantic_core-2.16.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:735dceec50fa907a3c314b84ed609dec54b76a814aa14eb90da31d1d36873a5e"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e83ebbf020be727d6e0991c1b192a5c2e7113eb66e3def0cd0c62f9f266247e4"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30a8259569fbeec49cfac7fda3ec8123486ef1b729225222f0d41d5f840b476f"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920c4897e55e2881db6a6da151198e5001552c3777cd42b8a4c2f72eedc2ee91"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5247a3d74355f8b1d780d0f3b32a23dd9f6d3ff43ef2037c6dcd249f35ecf4c"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5bea8012df5bb6dda1e67d0563ac50b7f64a5d5858348b5c8cb5043811c19d"}, - {file = "pydantic_core-2.16.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ed3025a8a7e5a59817b7494686d449ebfbe301f3e757b852c8d0d1961d6be864"}, - {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06f0d5a1d9e1b7932477c172cc720b3b23c18762ed7a8efa8398298a59d177c7"}, - {file = "pydantic_core-2.16.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:150ba5c86f502c040b822777e2e519b5625b47813bd05f9273a8ed169c97d9ae"}, - {file = "pydantic_core-2.16.1-cp311-none-win32.whl", hash = "sha256:d6cbdf12ef967a6aa401cf5cdf47850559e59eedad10e781471c960583f25aa1"}, - {file = "pydantic_core-2.16.1-cp311-none-win_amd64.whl", hash = "sha256:afa01d25769af33a8dac0d905d5c7bb2d73c7c3d5161b2dd6f8b5b5eea6a3c4c"}, - {file = "pydantic_core-2.16.1-cp311-none-win_arm64.whl", hash = "sha256:1a2fe7b00a49b51047334d84aafd7e39f80b7675cad0083678c58983662da89b"}, - {file = "pydantic_core-2.16.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:0f478ec204772a5c8218e30eb813ca43e34005dff2eafa03931b3d8caef87d51"}, - {file = "pydantic_core-2.16.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f1936ef138bed2165dd8573aa65e3095ef7c2b6247faccd0e15186aabdda7f66"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99d3a433ef5dc3021c9534a58a3686c88363c591974c16c54a01af7efd741f13"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd88f40f2294440d3f3c6308e50d96a0d3d0973d6f1a5732875d10f569acef49"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fac641bbfa43d5a1bed99d28aa1fded1984d31c670a95aac1bf1d36ac6ce137"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:72bf9308a82b75039b8c8edd2be2924c352eda5da14a920551a8b65d5ee89253"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb4363e6c9fc87365c2bc777a1f585a22f2f56642501885ffc7942138499bf54"}, - {file = "pydantic_core-2.16.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:20f724a023042588d0f4396bbbcf4cffd0ddd0ad3ed4f0d8e6d4ac4264bae81e"}, - {file = 
"pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fb4370b15111905bf8b5ba2129b926af9470f014cb0493a67d23e9d7a48348e8"}, - {file = "pydantic_core-2.16.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23632132f1fd608034f1a56cc3e484be00854db845b3a4a508834be5a6435a6f"}, - {file = "pydantic_core-2.16.1-cp312-none-win32.whl", hash = "sha256:b9f3e0bffad6e238f7acc20c393c1ed8fab4371e3b3bc311020dfa6020d99212"}, - {file = "pydantic_core-2.16.1-cp312-none-win_amd64.whl", hash = "sha256:a0b4cfe408cd84c53bab7d83e4209458de676a6ec5e9c623ae914ce1cb79b96f"}, - {file = "pydantic_core-2.16.1-cp312-none-win_arm64.whl", hash = "sha256:d195add190abccefc70ad0f9a0141ad7da53e16183048380e688b466702195dd"}, - {file = "pydantic_core-2.16.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:502c062a18d84452858f8aea1e520e12a4d5228fc3621ea5061409d666ea1706"}, - {file = "pydantic_core-2.16.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d8c032ccee90b37b44e05948b449a2d6baed7e614df3d3f47fe432c952c21b60"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:920f4633bee43d7a2818e1a1a788906df5a17b7ab6fe411220ed92b42940f818"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f5d37ff01edcbace53a402e80793640c25798fb7208f105d87a25e6fcc9ea06"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:399166f24c33a0c5759ecc4801f040dbc87d412c1a6d6292b2349b4c505effc9"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac89ccc39cd1d556cc72d6752f252dc869dde41c7c936e86beac5eb555041b66"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73802194f10c394c2bedce7a135ba1d8ba6cff23adf4217612bfc5cf060de34c"}, - {file = "pydantic_core-2.16.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8fa00fa24ffd8c31fac081bf7be7eb495be6d248db127f8776575a746fa55c95"}, - {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:601d3e42452cd4f2891c13fa8c70366d71851c1593ed42f57bf37f40f7dca3c8"}, - {file = "pydantic_core-2.16.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07982b82d121ed3fc1c51faf6e8f57ff09b1325d2efccaa257dd8c0dd937acca"}, - {file = "pydantic_core-2.16.1-cp38-none-win32.whl", hash = "sha256:d0bf6f93a55d3fa7a079d811b29100b019784e2ee6bc06b0bb839538272a5610"}, - {file = "pydantic_core-2.16.1-cp38-none-win_amd64.whl", hash = "sha256:fbec2af0ebafa57eb82c18c304b37c86a8abddf7022955d1742b3d5471a6339e"}, - {file = "pydantic_core-2.16.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a497be217818c318d93f07e14502ef93d44e6a20c72b04c530611e45e54c2196"}, - {file = "pydantic_core-2.16.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:694a5e9f1f2c124a17ff2d0be613fd53ba0c26de588eb4bdab8bca855e550d95"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d4dfc66abea3ec6d9f83e837a8f8a7d9d3a76d25c9911735c76d6745950e62c"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8655f55fe68c4685673265a650ef71beb2d31871c049c8b80262026f23605ee3"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21e3298486c4ea4e4d5cc6fb69e06fb02a4e22089304308817035ac006a7f506"}, - {file = 
"pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:71b4a48a7427f14679f0015b13c712863d28bb1ab700bd11776a5368135c7d60"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dca874e35bb60ce4f9f6665bfbfad050dd7573596608aeb9e098621ac331dc"}, - {file = "pydantic_core-2.16.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa496cd45cda0165d597e9d6f01e36c33c9508f75cf03c0a650018c5048f578e"}, - {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5317c04349472e683803da262c781c42c5628a9be73f4750ac7d13040efb5d2d"}, - {file = "pydantic_core-2.16.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42c29d54ed4501a30cd71015bf982fa95e4a60117b44e1a200290ce687d3e640"}, - {file = "pydantic_core-2.16.1-cp39-none-win32.whl", hash = "sha256:ba07646f35e4e49376c9831130039d1b478fbfa1215ae62ad62d2ee63cf9c18f"}, - {file = "pydantic_core-2.16.1-cp39-none-win_amd64.whl", hash = "sha256:2133b0e412a47868a358713287ff9f9a328879da547dc88be67481cdac529118"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d25ef0c33f22649b7a088035fd65ac1ce6464fa2876578df1adad9472f918a76"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:99c095457eea8550c9fa9a7a992e842aeae1429dab6b6b378710f62bfb70b394"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b49c604ace7a7aa8af31196abbf8f2193be605db6739ed905ecaf62af31ccae0"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c56da23034fe66221f2208c813d8aa509eea34d97328ce2add56e219c3a9f41c"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cebf8d56fee3b08ad40d332a807ecccd4153d3f1ba8231e111d9759f02edfd05"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:1ae8048cba95f382dba56766525abca438328455e35c283bb202964f41a780b0"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:780daad9e35b18d10d7219d24bfb30148ca2afc309928e1d4d53de86822593dc"}, - {file = "pydantic_core-2.16.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c94b5537bf6ce66e4d7830c6993152940a188600f6ae044435287753044a8fe2"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:adf28099d061a25fbcc6531febb7a091e027605385de9fe14dd6a97319d614cf"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:644904600c15816a1f9a1bafa6aab0d21db2788abcdf4e2a77951280473f33e1"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87bce04f09f0552b66fca0c4e10da78d17cb0e71c205864bab4e9595122cb9d9"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877045a7969ace04d59516d5d6a7dee13106822f99a5d8df5e6822941f7bedc8"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9c46e556ee266ed3fb7b7a882b53df3c76b45e872fdab8d9cf49ae5e91147fd7"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4eebbd049008eb800f519578e944b8dc8e0f7d59a5abb5924cc2d4ed3a1834ff"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:c0be58529d43d38ae849a91932391eb93275a06b93b79a8ab828b012e916a206"}, - {file = "pydantic_core-2.16.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b1fc07896fc1851558f532dffc8987e526b682ec73140886c831d773cef44b76"}, - {file = "pydantic_core-2.16.1.tar.gz", hash = "sha256:daff04257b49ab7f4b3f73f98283d3dbb1a65bf3500d55c7beac3c66c310fe34"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = 
"pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -585,7 +584,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", 
hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -593,15 +591,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -618,7 +609,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -626,7 +616,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -703,17 +692,17 @@ files = [ [[package]] name = "syrupy" -version = "4.6.0" +version = "4.6.1" description = "Pytest Snapshot Test Utility" optional = false python-versions = ">=3.8.1,<4" files = [ - {file = "syrupy-4.6.0-py3-none-any.whl", hash = "sha256:747aae1bcf3cb3249e33b1e6d81097874d23615982d5686ebe637875b0775a1b"}, - {file = "syrupy-4.6.0.tar.gz", hash = "sha256:231b1f5d00f1f85048ba81676c79448076189c4aef4d33f21ae32f3b4c565a54"}, + {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, + {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, ] [package.dependencies] -pytest = ">=7.0.0,<8.0.0" +pytest = ">=7.0.0,<9.0.0" [[package]] name = "tenacity" @@ -767,54 +756,57 @@ files = [ [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, + {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"}, + {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "watchdog" -version = "3.0.0" +version = "4.0.0" description = "Filesystem events monitoring" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, - {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, - {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, - {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, - {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, - {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, - {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, - {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, - {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, - {file 
= "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, - {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, - {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, - {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, - {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, - {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, + {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, + {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, + {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"}, + {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"}, + {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"}, + {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, + {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, + {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, + {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, + {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, + {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, + {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, + {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, ] [package.extras] @@ -823,4 +815,4 @@ watchmedo = ["PyYAML (>=3.10)"] [metadata] lock-version = "2.0" python-versions = ">=3.8.1,<4.0" -content-hash = "5437244d2d09ba0c8ee42518bd9cff83ebe4f597a13fb772b24a4417f3f1a431" +content-hash = "75f4075fd17085ea5a72c7a7dcb7d2086baada0772329fe99c746e400cb9e81c" diff --git a/libs/partners/robocorp/pyproject.toml b/libs/partners/robocorp/pyproject.toml index 581005cfa0c76..16a024b4448b0 100644 --- a/libs/partners/robocorp/pyproject.toml +++ b/libs/partners/robocorp/pyproject.toml @@ -12,10 +12,9 @@ license = "MIT" [tool.poetry.dependencies] python = ">=3.8.1,<4.0" -langchain-core = ">=0.0.12" +langchain-core = "^0.1" requests = "^2.31.0" types-requests = "^2.31.0.6" -langsmith = ">=0.0.83,<0.1" [tool.poetry.group.test] optional = true @@ -23,11 +22,11 @@ optional = true [tool.poetry.group.test.dependencies] pytest = "^7.3.0" freezegun = "^1.2.2" -pytest-mock = "^3.10.0" +pytest-mock = "^3.10.0" syrupy = "^4.0.2" pytest-watcher = "^0.3.4" pytest-asyncio = "^0.21.1" -langchain-core = {path = "../../core", develop = true} +langchain-core = { path = "../../core", develop = true } [tool.poetry.group.codespell] optional = true @@ -48,19 +47,19 @@ ruff = "^0.1.5" [tool.poetry.group.typing.dependencies] mypy = "^0.991" -langchain-core = {path = "../../core", develop = true} +langchain-core = { path = "../../core", develop = true } [tool.poetry.group.dev] optional = true [tool.poetry.group.dev.dependencies] -langchain-core = {path = "../../core", develop = true} +langchain-core = { path = "../../core", develop = true } [tool.ruff.lint] select = [ - "E", # pycodestyle - "F", # pyflakes - "I", 
# isort + "E", # pycodestyle + "F", # pyflakes + "I", # isort "T201", # print ] @@ -68,9 +67,7 @@ select = [ disallow_untyped_defs = "True" [tool.coverage.run] -omit = [ - "tests/*", -] +omit = ["tests/*"] [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/libs/partners/together/poetry.lock b/libs/partners/together/poetry.lock index e9f21bcb494fb..7892153f8ea39 100644 --- a/libs/partners/together/poetry.lock +++ b/libs/partners/together/poetry.lock @@ -2,87 +2,87 @@ [[package]] name = "aiohttp" -version = "3.9.1" +version = "3.9.3" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1f80197f8b0b846a8d5cf7b7ec6084493950d0882cc5537fb7b96a69e3c8590"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72444d17777865734aa1a4d167794c34b63e5883abb90356a0364a28904e6c0"}, - {file = "aiohttp-3.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b05d5cbe9dafcdc733262c3a99ccf63d2f7ce02543620d2bd8db4d4f7a22f83"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c4fa235d534b3547184831c624c0b7c1e262cd1de847d95085ec94c16fddcd5"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:289ba9ae8e88d0ba16062ecf02dd730b34186ea3b1e7489046fc338bdc3361c4"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bff7e2811814fa2271be95ab6e84c9436d027a0e59665de60edf44e529a42c1f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81b77f868814346662c96ab36b875d7814ebf82340d3284a31681085c051320f"}, - {file = "aiohttp-3.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b9c7426923bb7bd66d409da46c41e3fb40f5caf679da624439b9eba92043fa6"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8d44e7bf06b0c0a70a20f9100af9fcfd7f6d9d3913e37754c12d424179b4e48f"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22698f01ff5653fe66d16ffb7658f582a0ac084d7da1323e39fd9eab326a1f26"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ca7ca5abfbfe8d39e653870fbe8d7710be7a857f8a8386fc9de1aae2e02ce7e4"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8d7f98fde213f74561be1d6d3fa353656197f75d4edfbb3d94c9eb9b0fc47f5d"}, - {file = "aiohttp-3.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5216b6082c624b55cfe79af5d538e499cd5f5b976820eac31951fb4325974501"}, - {file = "aiohttp-3.9.1-cp310-cp310-win32.whl", hash = "sha256:0e7ba7ff228c0d9a2cd66194e90f2bca6e0abca810b786901a569c0de082f489"}, - {file = "aiohttp-3.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:c7e939f1ae428a86e4abbb9a7c4732bf4706048818dfd979e5e2839ce0159f23"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:df9cf74b9bc03d586fc53ba470828d7b77ce51b0582d1d0b5b2fb673c0baa32d"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecca113f19d5e74048c001934045a2b9368d77b0b17691d905af18bd1c21275e"}, - {file = "aiohttp-3.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8cef8710fb849d97c533f259103f09bac167a008d7131d7b2b0e3a33269185c0"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:bea94403a21eb94c93386d559bce297381609153e418a3ffc7d6bf772f59cc35"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91c742ca59045dce7ba76cab6e223e41d2c70d79e82c284a96411f8645e2afff"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c93b7c2e52061f0925c3382d5cb8980e40f91c989563d3d32ca280069fd6a87"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee2527134f95e106cc1653e9ac78846f3a2ec1004cf20ef4e02038035a74544d"}, - {file = "aiohttp-3.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11ff168d752cb41e8492817e10fb4f85828f6a0142b9726a30c27c35a1835f01"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b8c3a67eb87394386847d188996920f33b01b32155f0a94f36ca0e0c635bf3e3"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c7b5d5d64e2a14e35a9240b33b89389e0035e6de8dbb7ffa50d10d8b65c57449"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:69985d50a2b6f709412d944ffb2e97d0be154ea90600b7a921f95a87d6f108a2"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:c9110c06eaaac7e1f5562caf481f18ccf8f6fdf4c3323feab28a93d34cc646bd"}, - {file = "aiohttp-3.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737e69d193dac7296365a6dcb73bbbf53bb760ab25a3727716bbd42022e8d7a"}, - {file = "aiohttp-3.9.1-cp311-cp311-win32.whl", hash = "sha256:4ee8caa925aebc1e64e98432d78ea8de67b2272252b0a931d2ac3bd876ad5544"}, - {file = "aiohttp-3.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:a34086c5cc285be878622e0a6ab897a986a6e8bf5b67ecb377015f06ed316587"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f800164276eec54e0af5c99feb9494c295118fc10a11b997bbb1348ba1a52065"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:500f1c59906cd142d452074f3811614be04819a38ae2b3239a48b82649c08821"}, - {file = "aiohttp-3.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0b0a6a36ed7e164c6df1e18ee47afbd1990ce47cb428739d6c99aaabfaf1b3af"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69da0f3ed3496808e8cbc5123a866c41c12c15baaaead96d256477edf168eb57"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:176df045597e674fa950bf5ae536be85699e04cea68fa3a616cf75e413737eb5"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b796b44111f0cab6bbf66214186e44734b5baab949cb5fb56154142a92989aeb"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f27fdaadce22f2ef950fc10dcdf8048407c3b42b73779e48a4e76b3c35bca26c"}, - {file = "aiohttp-3.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcb6532b9814ea7c5a6a3299747c49de30e84472fa72821b07f5a9818bce0f66"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:54631fb69a6e44b2ba522f7c22a6fb2667a02fd97d636048478db2fd8c4e98fe"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4b4c452d0190c5a820d3f5c0f3cd8a28ace48c54053e24da9d6041bf81113183"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:cae4c0c2ca800c793cae07ef3d40794625471040a87e1ba392039639ad61ab5b"}, - {file = 
"aiohttp-3.9.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:565760d6812b8d78d416c3c7cfdf5362fbe0d0d25b82fed75d0d29e18d7fc30f"}, - {file = "aiohttp-3.9.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54311eb54f3a0c45efb9ed0d0a8f43d1bc6060d773f6973efd90037a51cd0a3f"}, - {file = "aiohttp-3.9.1-cp312-cp312-win32.whl", hash = "sha256:85c3e3c9cb1d480e0b9a64c658cd66b3cfb8e721636ab8b0e746e2d79a7a9eed"}, - {file = "aiohttp-3.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:11cb254e397a82efb1805d12561e80124928e04e9c4483587ce7390b3866d213"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8a22a34bc594d9d24621091d1b91511001a7eea91d6652ea495ce06e27381f70"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598db66eaf2e04aa0c8900a63b0101fdc5e6b8a7ddd805c56d86efb54eb66672"}, - {file = "aiohttp-3.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c9376e2b09895c8ca8b95362283365eb5c03bdc8428ade80a864160605715f1"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41473de252e1797c2d2293804e389a6d6986ef37cbb4a25208de537ae32141dd"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c5857612c9813796960c00767645cb5da815af16dafb32d70c72a8390bbf690"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffcd828e37dc219a72c9012ec44ad2e7e3066bec6ff3aaa19e7d435dbf4032ca"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:219a16763dc0294842188ac8a12262b5671817042b35d45e44fd0a697d8c8361"}, - {file = "aiohttp-3.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f694dc8a6a3112059258a725a4ebe9acac5fe62f11c77ac4dcf896edfa78ca28"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bcc0ea8d5b74a41b621ad4a13d96c36079c81628ccc0b30cfb1603e3dfa3a014"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:90ec72d231169b4b8d6085be13023ece8fa9b1bb495e4398d847e25218e0f431"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:cf2a0ac0615842b849f40c4d7f304986a242f1e68286dbf3bd7a835e4f83acfd"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0e49b08eafa4f5707ecfb321ab9592717a319e37938e301d462f79b4e860c32a"}, - {file = "aiohttp-3.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c59e0076ea31c08553e868cec02d22191c086f00b44610f8ab7363a11a5d9d8"}, - {file = "aiohttp-3.9.1-cp38-cp38-win32.whl", hash = "sha256:4831df72b053b1eed31eb00a2e1aff6896fb4485301d4ccb208cac264b648db4"}, - {file = "aiohttp-3.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:3135713c5562731ee18f58d3ad1bf41e1d8883eb68b363f2ffde5b2ea4b84cc7"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cfeadf42840c1e870dc2042a232a8748e75a36b52d78968cda6736de55582766"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70907533db712f7aa791effb38efa96f044ce3d4e850e2d7691abd759f4f0ae0"}, - {file = "aiohttp-3.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cdefe289681507187e375a5064c7599f52c40343a8701761c802c1853a504558"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7481f581251bb5558ba9f635db70908819caa221fc79ee52a7f58392778c636"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:49f0c1b3c2842556e5de35f122fc0f0b721334ceb6e78c3719693364d4af8499"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d406b01a9f5a7e232d1b0d161b40c05275ffbcbd772dc18c1d5a570961a1ca4"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d8e4450e7fe24d86e86b23cc209e0023177b6d59502e33807b732d2deb6975f"}, - {file = "aiohttp-3.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c0266cd6f005e99f3f51e583012de2778e65af6b73860038b968a0a8888487a"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab221850108a4a063c5b8a70f00dd7a1975e5a1713f87f4ab26a46e5feac5a0e"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c88a15f272a0ad3d7773cf3a37cc7b7d077cbfc8e331675cf1346e849d97a4e5"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:237533179d9747080bcaad4d02083ce295c0d2eab3e9e8ce103411a4312991a0"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:02ab6006ec3c3463b528374c4cdce86434e7b89ad355e7bf29e2f16b46c7dd6f"}, - {file = "aiohttp-3.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04fa38875e53eb7e354ece1607b1d2fdee2d175ea4e4d745f6ec9f751fe20c7c"}, - {file = "aiohttp-3.9.1-cp39-cp39-win32.whl", hash = "sha256:82eefaf1a996060602f3cc1112d93ba8b201dbf5d8fd9611227de2003dddb3b7"}, - {file = "aiohttp-3.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:9b05d33ff8e6b269e30a7957bd3244ffbce2a7a35a81b81c382629b80af1a8bf"}, - {file = "aiohttp-3.9.1.tar.gz", hash = "sha256:8fc49a87ac269d4529da45871e2ffb6874e87779c3d0e2ccd813c0899221239d"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:939677b61f9d72a4fa2a042a5eee2a99a24001a67c13da113b2e30396567db54"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f5cd333fcf7590a18334c90f8c9147c837a6ec8a178e88d90a9b96ea03194cc"}, + {file = "aiohttp-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82e6aa28dd46374f72093eda8bcd142f7771ee1eb9d1e223ff0fa7177a96b4a5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f56455b0c2c7cc3b0c584815264461d07b177f903a04481dfc33e08a89f0c26b"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bca77a198bb6e69795ef2f09a5f4c12758487f83f33d63acde5f0d4919815768"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e083c285857b78ee21a96ba1eb1b5339733c3563f72980728ca2b08b53826ca5"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab40e6251c3873d86ea9b30a1ac6d7478c09277b32e14745d0d3c6e76e3c7e29"}, + {file = "aiohttp-3.9.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df822ee7feaaeffb99c1a9e5e608800bd8eda6e5f18f5cfb0dc7eeb2eaa6bbec"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:acef0899fea7492145d2bbaaaec7b345c87753168589cc7faf0afec9afe9b747"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cd73265a9e5ea618014802ab01babf1940cecb90c9762d8b9e7d2cc1e1969ec6"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a78ed8a53a1221393d9637c01870248a6f4ea5b214a59a92a36f18151739452c"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:6b0e029353361f1746bac2e4cc19b32f972ec03f0f943b390c4ab3371840aabf"}, + {file = "aiohttp-3.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7cf5c9458e1e90e3c390c2639f1017a0379a99a94fdfad3a1fd966a2874bba52"}, + {file = "aiohttp-3.9.3-cp310-cp310-win32.whl", hash = "sha256:3e59c23c52765951b69ec45ddbbc9403a8761ee6f57253250c6e1536cacc758b"}, + {file = "aiohttp-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:055ce4f74b82551678291473f66dc9fb9048a50d8324278751926ff0ae7715e5"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6b88f9386ff1ad91ace19d2a1c0225896e28815ee09fc6a8932fded8cda97c3d"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c46956ed82961e31557b6857a5ca153c67e5476972e5f7190015018760938da2"}, + {file = "aiohttp-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07b837ef0d2f252f96009e9b8435ec1fef68ef8b1461933253d318748ec1acdc"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad46e6f620574b3b4801c68255492e0159d1712271cc99d8bdf35f2043ec266"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ed3e046ea7b14938112ccd53d91c1539af3e6679b222f9469981e3dac7ba1ce"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:039df344b45ae0b34ac885ab5b53940b174530d4dd8a14ed8b0e2155b9dddccb"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7943c414d3a8d9235f5f15c22ace69787c140c80b718dcd57caaade95f7cd93b"}, + {file = "aiohttp-3.9.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84871a243359bb42c12728f04d181a389718710129b36b6aad0fc4655a7647d4"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5eafe2c065df5401ba06821b9a054d9cb2848867f3c59801b5d07a0be3a380ae"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9d3c9b50f19704552f23b4eaea1fc082fdd82c63429a6506446cbd8737823da3"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f033d80bc6283092613882dfe40419c6a6a1527e04fc69350e87a9df02bbc283"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2c895a656dd7e061b2fd6bb77d971cc38f2afc277229ce7dd3552de8313a483e"}, + {file = "aiohttp-3.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1f5a71d25cd8106eab05f8704cd9167b6e5187bcdf8f090a66c6d88b634802b4"}, + {file = "aiohttp-3.9.3-cp311-cp311-win32.whl", hash = "sha256:50fca156d718f8ced687a373f9e140c1bb765ca16e3d6f4fe116e3df7c05b2c5"}, + {file = "aiohttp-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:5fe9ce6c09668063b8447f85d43b8d1c4e5d3d7e92c63173e6180b2ac5d46dd8"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:38a19bc3b686ad55804ae931012f78f7a534cce165d089a2059f658f6c91fa60"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:770d015888c2a598b377bd2f663adfd947d78c0124cfe7b959e1ef39f5b13869"}, + {file = "aiohttp-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee43080e75fc92bf36219926c8e6de497f9b247301bbf88c5c7593d931426679"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52df73f14ed99cee84865b95a3d9e044f226320a87af208f068ecc33e0c35b96"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:dc9b311743a78043b26ffaeeb9715dc360335e5517832f5a8e339f8a43581e4d"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b955ed993491f1a5da7f92e98d5dad3c1e14dc175f74517c4e610b1f2456fb11"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:504b6981675ace64c28bf4a05a508af5cde526e36492c98916127f5a02354d53"}, + {file = "aiohttp-3.9.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a6fe5571784af92b6bc2fda8d1925cccdf24642d49546d3144948a6a1ed58ca5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ba39e9c8627edc56544c8628cc180d88605df3892beeb2b94c9bc857774848ca"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e5e46b578c0e9db71d04c4b506a2121c0cb371dd89af17a0586ff6769d4c58c1"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:938a9653e1e0c592053f815f7028e41a3062e902095e5a7dc84617c87267ebd5"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:c3452ea726c76e92f3b9fae4b34a151981a9ec0a4847a627c43d71a15ac32aa6"}, + {file = "aiohttp-3.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ff30218887e62209942f91ac1be902cc80cddb86bf00fbc6783b7a43b2bea26f"}, + {file = "aiohttp-3.9.3-cp312-cp312-win32.whl", hash = "sha256:38f307b41e0bea3294a9a2a87833191e4bcf89bb0365e83a8be3a58b31fb7f38"}, + {file = "aiohttp-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:b791a3143681a520c0a17e26ae7465f1b6f99461a28019d1a2f425236e6eedb5"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0ed621426d961df79aa3b963ac7af0d40392956ffa9be022024cd16297b30c8c"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7f46acd6a194287b7e41e87957bfe2ad1ad88318d447caf5b090012f2c5bb528"}, + {file = "aiohttp-3.9.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feeb18a801aacb098220e2c3eea59a512362eb408d4afd0c242044c33ad6d542"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f734e38fd8666f53da904c52a23ce517f1b07722118d750405af7e4123933511"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b40670ec7e2156d8e57f70aec34a7216407848dfe6c693ef131ddf6e76feb672"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fdd215b7b7fd4a53994f238d0f46b7ba4ac4c0adb12452beee724ddd0743ae5d"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:017a21b0df49039c8f46ca0971b3a7fdc1f56741ab1240cb90ca408049766168"}, + {file = "aiohttp-3.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e99abf0bba688259a496f966211c49a514e65afa9b3073a1fcee08856e04425b"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:648056db9a9fa565d3fa851880f99f45e3f9a771dd3ff3bb0c048ea83fb28194"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8aacb477dc26797ee089721536a292a664846489c49d3ef9725f992449eda5a8"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:522a11c934ea660ff8953eda090dcd2154d367dec1ae3c540aff9f8a5c109ab4"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5bce0dc147ca85caa5d33debc4f4d65e8e8b5c97c7f9f660f215fa74fc49a321"}, + {file = "aiohttp-3.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", 
hash = "sha256:4b4af9f25b49a7be47c0972139e59ec0e8285c371049df1a63b6ca81fdd216a2"}, + {file = "aiohttp-3.9.3-cp38-cp38-win32.whl", hash = "sha256:298abd678033b8571995650ccee753d9458dfa0377be4dba91e4491da3f2be63"}, + {file = "aiohttp-3.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:69361bfdca5468c0488d7017b9b1e5ce769d40b46a9f4a2eed26b78619e9396c"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0fa43c32d1643f518491d9d3a730f85f5bbaedcbd7fbcae27435bb8b7a061b29"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:835a55b7ca49468aaaac0b217092dfdff370e6c215c9224c52f30daaa735c1c1"}, + {file = "aiohttp-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06a9b2c8837d9a94fae16c6223acc14b4dfdff216ab9b7202e07a9a09541168f"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abf151955990d23f84205286938796c55ff11bbfb4ccfada8c9c83ae6b3c89a3"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59c26c95975f26e662ca78fdf543d4eeaef70e533a672b4113dd888bd2423caa"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f95511dd5d0e05fd9728bac4096319f80615aaef4acbecb35a990afebe953b0e"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595f105710293e76b9dc09f52e0dd896bd064a79346234b521f6b968ffdd8e58"}, + {file = "aiohttp-3.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7c8b816c2b5af5c8a436df44ca08258fc1a13b449393a91484225fcb7545533"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f1088fa100bf46e7b398ffd9904f4808a0612e1d966b4aa43baa535d1b6341eb"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f59dfe57bb1ec82ac0698ebfcdb7bcd0e99c255bd637ff613760d5f33e7c81b3"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:361a1026c9dd4aba0109e4040e2aecf9884f5cfe1b1b1bd3d09419c205e2e53d"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:363afe77cfcbe3a36353d8ea133e904b108feea505aa4792dad6585a8192c55a"}, + {file = "aiohttp-3.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e2c45c208c62e955e8256949eb225bd8b66a4c9b6865729a786f2aa79b72e9d"}, + {file = "aiohttp-3.9.3-cp39-cp39-win32.whl", hash = "sha256:f7217af2e14da0856e082e96ff637f14ae45c10a5714b63c77f26d8884cf1051"}, + {file = "aiohttp-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:27468897f628c627230dba07ec65dc8d0db566923c48f29e084ce382119802bc"}, + {file = "aiohttp-3.9.3.tar.gz", hash = "sha256:90842933e5d1ff760fae6caca4b2b3edba53ba8f4b71e95dacf2818a2aca06f7"}, ] [package.dependencies] @@ -178,13 +178,13 @@ tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "p [[package]] name = "certifi" -version = "2023.11.17" +version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] @@ -344,13 +344,13 @@ test = ["pytest (>=6)"] [[package]] name = "freezegun" -version = "1.3.1" +version = "1.4.0" description = "Let your Python tests travel through time" optional = false python-versions = ">=3.7" files = [ - {file = "freezegun-1.3.1-py3-none-any.whl", hash = "sha256:065e77a12624d05531afa87ade12a0b9bdb53495c4573893252a055b545ce3ea"}, - {file = "freezegun-1.3.1.tar.gz", hash = "sha256:48984397b3b58ef5dfc645d6a304b0060f612bcecfdaaf45ce8aff0077a6cb6a"}, + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, ] [package.dependencies] @@ -491,7 +491,7 @@ files = [ [[package]] name = "langchain-core" -version = "0.1.9" +version = "0.1.23" description = "Building applications with LLMs through composability" optional = false python-versions = ">=3.8.1,<4.0" @@ -501,7 +501,7 @@ develop = true [package.dependencies] anyio = ">=3,<5" jsonpatch = "^1.33" -langsmith = "~0.0.63" +langsmith = "^0.0.87" packaging = "^23.2" pydantic = ">=1,<3" PyYAML = ">=5.3" @@ -517,13 +517,13 @@ url = "../../core" [[package]] name = "langsmith" -version = "0.0.72" +version = "0.0.87" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = ">=3.8.1,<4.0" files = [ - {file = "langsmith-0.0.72-py3-none-any.whl", hash = "sha256:2cddd49cd7d1477409c8785746acf42dbd6709a7d36e751247a3cab5e3eee20e"}, - {file = "langsmith-0.0.72.tar.gz", hash = "sha256:505f517e2e67836a4e831917d8223a18cc59d51bdae1e4295fc6dff2266bab5d"}, + {file = "langsmith-0.0.87-py3-none-any.whl", hash = "sha256:8903d3811b9fc89eb18f5961c8e6935fbd2d0f119884fbf30dc70b8f8f4121fc"}, + {file = "langsmith-0.0.87.tar.gz", hash = "sha256:36c4cc47e5b54be57d038036a30fb19ce6e4c73048cd7a464b8f25b459694d34"}, ] [package.dependencies] @@ -532,85 +532,101 @@ requests = ">=2,<3" [[package]] name = "multidict" -version = "6.0.4" +version = "6.0.5" description = "multidict implementation" optional = false python-versions = ">=3.7" files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = 
"sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, + {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, + {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, + {file = 
"multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, + {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, + {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, + {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, + {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, + {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, + {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, + {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, + {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, + {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, + {file = 
"multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, + {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, + {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, + {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, + {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, + {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, + {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, + {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, + {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = 
"sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, + {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, + {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, + {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, + {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, + {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, + {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, + {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, + {file = 
"multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, + {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, + {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, + {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, + {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, + {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, + {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, ] [[package]] @@ -687,13 +703,13 @@ files = [ [[package]] name = "pluggy" -version = "1.3.0" +version = "1.4.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, + {file = "pluggy-1.4.0-py3-none-any.whl", hash = "sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981"}, + {file = "pluggy-1.4.0.tar.gz", hash = "sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be"}, ] [package.extras] @@ -702,18 +718,18 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pydantic" -version = "2.5.2" +version = "2.6.1" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, - {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, + {file = "pydantic-2.6.1-py3-none-any.whl", hash = "sha256:0b6a909df3192245cb736509a92ff69e4fef76116feffec68e93a567347bae6f"}, + {file = "pydantic-2.6.1.tar.gz", hash = "sha256:4fd5c182a2488dc63e6d32737ff19937888001e2a6d86e94b3f233104a5d1fa9"}, ] [package.dependencies] annotated-types = ">=0.4.0" -pydantic-core = "2.14.5" +pydantic-core = "2.16.2" typing-extensions = ">=4.6.1" [package.extras] @@ -721,116 +737,90 @@ email = ["email-validator (>=2.0.0)"] [[package]] name = "pydantic-core" -version = "2.14.5" +version = "2.16.2" description = "" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", 
hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, - {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, - {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, - {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, - {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, - {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, - {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, - {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, - {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, - {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, - {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, - {file = 
"pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, - {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, - {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, - {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, - {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, - {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, - {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, - {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, - {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, - {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, - {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, - {file = 
"pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, - {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, - {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, - {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, - {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, - {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, - {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, - {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, - {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, - {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, - {file = 
"pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, - {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, - {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, - {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, - {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, - {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, - {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, - {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, - {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, - {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, - {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3fab4e75b8c525a4776e7630b9ee48aea50107fea6ca9f593c98da3f4d11bf7c"}, + {file = "pydantic_core-2.16.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8bde5b48c65b8e807409e6f20baee5d2cd880e0fad00b1a811ebc43e39a00ab2"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2924b89b16420712e9bb8192396026a8fbd6d8726224f918353ac19c4c043d2a"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16aa02e7a0f539098e215fc193c8926c897175d64c7926d00a36188917717a05"}, + {file = 
"pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:936a787f83db1f2115ee829dd615c4f684ee48ac4de5779ab4300994d8af325b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:459d6be6134ce3b38e0ef76f8a672924460c455d45f1ad8fdade36796df1ddc8"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9ee4febb249c591d07b2d4dd36ebcad0ccd128962aaa1801508320896575ef"}, + {file = "pydantic_core-2.16.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40a0bd0bed96dae5712dab2aba7d334a6c67cbcac2ddfca7dbcc4a8176445990"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:870dbfa94de9b8866b37b867a2cb37a60c401d9deb4a9ea392abf11a1f98037b"}, + {file = "pydantic_core-2.16.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:308974fdf98046db28440eb3377abba274808bf66262e042c412eb2adf852731"}, + {file = "pydantic_core-2.16.2-cp310-none-win32.whl", hash = "sha256:a477932664d9611d7a0816cc3c0eb1f8856f8a42435488280dfbf4395e141485"}, + {file = "pydantic_core-2.16.2-cp310-none-win_amd64.whl", hash = "sha256:8f9142a6ed83d90c94a3efd7af8873bf7cefed2d3d44387bf848888482e2d25f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:406fac1d09edc613020ce9cf3f2ccf1a1b2f57ab00552b4c18e3d5276c67eb11"}, + {file = "pydantic_core-2.16.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce232a6170dd6532096cadbf6185271e4e8c70fc9217ebe105923ac105da9978"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a90fec23b4b05a09ad988e7a4f4e081711a90eb2a55b9c984d8b74597599180f"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8aafeedb6597a163a9c9727d8a8bd363a93277701b7bfd2749fbefee2396469e"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9957433c3a1b67bdd4c63717eaf174ebb749510d5ea612cd4e83f2d9142f3fc8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d7a9165167269758145756db43a133608a531b1e5bb6a626b9ee24bc38a8f7"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dffaf740fe2e147fedcb6b561353a16243e654f7fe8e701b1b9db148242e1272"}, + {file = "pydantic_core-2.16.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8ed79883b4328b7f0bd142733d99c8e6b22703e908ec63d930b06be3a0e7113"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:cf903310a34e14651c9de056fcc12ce090560864d5a2bb0174b971685684e1d8"}, + {file = "pydantic_core-2.16.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:46b0d5520dbcafea9a8645a8164658777686c5c524d381d983317d29687cce97"}, + {file = "pydantic_core-2.16.2-cp311-none-win32.whl", hash = "sha256:70651ff6e663428cea902dac297066d5c6e5423fda345a4ca62430575364d62b"}, + {file = "pydantic_core-2.16.2-cp311-none-win_amd64.whl", hash = "sha256:98dc6f4f2095fc7ad277782a7c2c88296badcad92316b5a6e530930b1d475ebc"}, + {file = "pydantic_core-2.16.2-cp311-none-win_arm64.whl", hash = "sha256:ef6113cd31411eaf9b39fc5a8848e71c72656fd418882488598758b2c8c6dfa0"}, + {file = "pydantic_core-2.16.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:88646cae28eb1dd5cd1e09605680c2b043b64d7481cdad7f5003ebef401a3039"}, + {file = 
"pydantic_core-2.16.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b883af50eaa6bb3299780651e5be921e88050ccf00e3e583b1e92020333304b"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf26c2e2ea59d32807081ad51968133af3025c4ba5753e6a794683d2c91bf6e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:99af961d72ac731aae2a1b55ccbdae0733d816f8bfb97b41909e143de735f522"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02906e7306cb8c5901a1feb61f9ab5e5c690dbbeaa04d84c1b9ae2a01ebe9379"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5362d099c244a2d2f9659fb3c9db7c735f0004765bbe06b99be69fbd87c3f15"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ac426704840877a285d03a445e162eb258924f014e2f074e209d9b4ff7bf380"}, + {file = "pydantic_core-2.16.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b94cbda27267423411c928208e89adddf2ea5dd5f74b9528513f0358bba019cb"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6db58c22ac6c81aeac33912fb1af0e930bc9774166cdd56eade913d5f2fff35e"}, + {file = "pydantic_core-2.16.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:396fdf88b1b503c9c59c84a08b6833ec0c3b5ad1a83230252a9e17b7dfb4cffc"}, + {file = "pydantic_core-2.16.2-cp312-none-win32.whl", hash = "sha256:7c31669e0c8cc68400ef0c730c3a1e11317ba76b892deeefaf52dcb41d56ed5d"}, + {file = "pydantic_core-2.16.2-cp312-none-win_amd64.whl", hash = "sha256:a3b7352b48fbc8b446b75f3069124e87f599d25afb8baa96a550256c031bb890"}, + {file = "pydantic_core-2.16.2-cp312-none-win_arm64.whl", hash = "sha256:a9e523474998fb33f7c1a4d55f5504c908d57add624599e095c20fa575b8d943"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ae34418b6b389d601b31153b84dce480351a352e0bb763684a1b993d6be30f17"}, + {file = "pydantic_core-2.16.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:732bd062c9e5d9582a30e8751461c1917dd1ccbdd6cafb032f02c86b20d2e7ec"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b52776a2e3230f4854907a1e0946eec04d41b1fc64069ee774876bbe0eab55"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef551c053692b1e39e3f7950ce2296536728871110e7d75c4e7753fb30ca87f4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ebb892ed8599b23fa8f1799e13a12c87a97a6c9d0f497525ce9858564c4575a4"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa6c8c582036275997a733427b88031a32ffa5dfc3124dc25a730658c47a572f"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4ba0884a91f1aecce75202473ab138724aa4fb26d7707f2e1fa6c3e68c84fbf"}, + {file = "pydantic_core-2.16.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7924e54f7ce5d253d6160090ddc6df25ed2feea25bfb3339b424a9dd591688bc"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69a7b96b59322a81c2203be537957313b07dd333105b73db0b69212c7d867b4b"}, + {file = "pydantic_core-2.16.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:7e6231aa5bdacda78e96ad7b07d0c312f34ba35d717115f4b4bff6cb87224f0f"}, + {file = "pydantic_core-2.16.2-cp38-none-win32.whl", hash = "sha256:41dac3b9fce187a25c6253ec79a3f9e2a7e761eb08690e90415069ea4a68ff7a"}, + {file = "pydantic_core-2.16.2-cp38-none-win_amd64.whl", hash = "sha256:f685dbc1fdadb1dcd5b5e51e0a378d4685a891b2ddaf8e2bba89bd3a7144e44a"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:55749f745ebf154c0d63d46c8c58594d8894b161928aa41adbb0709c1fe78b77"}, + {file = "pydantic_core-2.16.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b30b0dd58a4509c3bd7eefddf6338565c4905406aee0c6e4a5293841411a1286"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18de31781cdc7e7b28678df7c2d7882f9692ad060bc6ee3c94eb15a5d733f8f7"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5864b0242f74b9dd0b78fd39db1768bc3f00d1ffc14e596fd3e3f2ce43436a33"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8f9186ca45aee030dc8234118b9c0784ad91a0bb27fc4e7d9d6608a5e3d386c"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc6f6c9be0ab6da37bc77c2dda5f14b1d532d5dbef00311ee6e13357a418e646"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa057095f621dad24a1e906747179a69780ef45cc8f69e97463692adbcdae878"}, + {file = "pydantic_core-2.16.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ad84731a26bcfb299f9eab56c7932d46f9cad51c52768cace09e92a19e4cf55"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3b052c753c4babf2d1edc034c97851f867c87d6f3ea63a12e2700f159f5c41c3"}, + {file = "pydantic_core-2.16.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e0f686549e32ccdb02ae6f25eee40cc33900910085de6aa3790effd391ae10c2"}, + {file = "pydantic_core-2.16.2-cp39-none-win32.whl", hash = "sha256:7afb844041e707ac9ad9acad2188a90bffce2c770e6dc2318be0c9916aef1469"}, + {file = "pydantic_core-2.16.2-cp39-none-win_amd64.whl", hash = "sha256:9da90d393a8227d717c19f5397688a38635afec89f2e2d7af0df037f3249c39a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f60f920691a620b03082692c378661947d09415743e437a7478c309eb0e4f82"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:47924039e785a04d4a4fa49455e51b4eb3422d6eaacfde9fc9abf8fdef164e8a"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6294e76b0380bb7a61eb8a39273c40b20beb35e8c87ee101062834ced19c545"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe56851c3f1d6f5384b3051c536cc81b3a93a73faf931f404fef95217cf1e10d"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d776d30cde7e541b8180103c3f294ef7c1862fd45d81738d156d00551005784"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:72f7919af5de5ecfaf1eba47bf9a5d8aa089a3340277276e5636d16ee97614d7"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:4bfcbde6e06c56b30668a0c872d75a7ef3025dc3c1823a13cf29a0e9b33f67e8"}, + {file = "pydantic_core-2.16.2-pp310-pypy310_pp73-win_amd64.whl", hash = 
"sha256:ff7c97eb7a29aba230389a2661edf2e9e06ce616c7e35aa764879b6894a44b25"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9b5f13857da99325dcabe1cc4e9e6a3d7b2e2c726248ba5dd4be3e8e4a0b6d0e"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a7e41e3ada4cca5f22b478c08e973c930e5e6c7ba3588fb8e35f2398cdcc1545"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60eb8ceaa40a41540b9acae6ae7c1f0a67d233c40dc4359c256ad2ad85bdf5e5"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7beec26729d496a12fd23cf8da9944ee338c8b8a17035a560b585c36fe81af20"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:22c5f022799f3cd6741e24f0443ead92ef42be93ffda0d29b2597208c94c3753"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:eca58e319f4fd6df004762419612122b2c7e7d95ffafc37e890252f869f3fb2a"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ed957db4c33bc99895f3a1672eca7e80e8cda8bd1e29a80536b4ec2153fa9804"}, + {file = "pydantic_core-2.16.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:459c0d338cc55d099798618f714b21b7ece17eb1a87879f2da20a3ff4c7628e2"}, + {file = "pydantic_core-2.16.2.tar.gz", hash = "sha256:0ba503850d8b8dcc18391f10de896ae51d37fe5fe43dbfb6a35c5c5cad271a06"}, ] [package.dependencies] @@ -838,13 +828,13 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pytest" -version = "7.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] @@ -895,13 +885,13 @@ dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "pytest-watcher" -version = "0.3.4" +version = "0.3.5" description = "Automatically rerun your tests on file modifications" optional = false python-versions = ">=3.7.0,<4.0.0" files = [ - {file = "pytest_watcher-0.3.4-py3-none-any.whl", hash = "sha256:edd2bd9c8a1fb14d48c9f4947234065eb9b4c1acedc0bf213b1f12501dfcffd3"}, - {file = "pytest_watcher-0.3.4.tar.gz", hash = "sha256:d39491ba15b589221bb9a78ef4bed3d5d1503aed08209b1a138aeb95b9117a18"}, + {file = "pytest_watcher-0.3.5-py3-none-any.whl", hash = "sha256:af00ca52c7be22dc34c0fd3d7ffef99057207a73b05dc5161fe3b2fe91f58130"}, + {file = "pytest_watcher-0.3.5.tar.gz", hash = "sha256:8896152460ba2b1a8200c12117c6611008ec96c8b2d811f0a05ab8a82b043ff8"}, ] [package.dependencies] @@ -934,6 +924,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = 
"PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -941,8 +932,16 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -959,6 +958,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -966,6 +966,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -994,28 +995,28 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.1.8" +version = "0.1.15" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.1.8-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7de792582f6e490ae6aef36a58d85df9f7a0cfd1b0d4fe6b4fb51803a3ac96fa"}, - {file = "ruff-0.1.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:c8e3255afd186c142eef4ec400d7826134f028a85da2146102a1172ecc7c3696"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff78a7583020da124dd0deb835ece1d87bb91762d40c514ee9b67a087940528b"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd8ee69b02e7bdefe1e5da2d5b6eaaddcf4f90859f00281b2333c0e3a0cc9cd6"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a05b0ddd7ea25495e4115a43125e8a7ebed0aa043c3d432de7e7d6e8e8cd6448"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e6f08ca730f4dc1b76b473bdf30b1b37d42da379202a059eae54ec7fc1fbcfed"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f35960b02df6b827c1b903091bb14f4b003f6cf102705efc4ce78132a0aa5af3"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7d076717c67b34c162da7c1a5bda16ffc205e0e0072c03745275e7eab888719f"}, - {file = "ruff-0.1.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6a21ab023124eafb7cef6d038f835cb1155cd5ea798edd8d9eb2f8b84be07d9"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ce697c463458555027dfb194cb96d26608abab920fa85213deb5edf26e026664"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:db6cedd9ffed55548ab313ad718bc34582d394e27a7875b4b952c2d29c001b26"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:05ffe9dbd278965271252704eddb97b4384bf58b971054d517decfbf8c523f05"}, - {file = "ruff-0.1.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5daaeaf00ae3c1efec9742ff294b06c3a2a9db8d3db51ee4851c12ad385cda30"}, - {file = "ruff-0.1.8-py3-none-win32.whl", hash = "sha256:e49fbdfe257fa41e5c9e13c79b9e79a23a79bd0e40b9314bc53840f520c2c0b3"}, - {file = "ruff-0.1.8-py3-none-win_amd64.whl", hash = "sha256:f41f692f1691ad87f51708b823af4bb2c5c87c9248ddd3191c8f088e66ce590a"}, - {file = "ruff-0.1.8-py3-none-win_arm64.whl", hash = "sha256:aa8ee4f8440023b0a6c3707f76cadce8657553655dcbb5fc9b2f9bb9bee389f6"}, - {file = "ruff-0.1.8.tar.gz", hash = "sha256:f7ee467677467526cfe135eab86a40a0e8db43117936ac4f9b469ce9cdb3fb62"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:5fe8d54df166ecc24106db7dd6a68d44852d14eb0729ea4672bb4d96c320b7df"}, + {file = "ruff-0.1.15-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6f0bfbb53c4b4de117ac4d6ddfd33aa5fc31beeaa21d23c45c6dd249faf9126f"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0d432aec35bfc0d800d4f70eba26e23a352386be3a6cf157083d18f6f5881c8"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9405fa9ac0e97f35aaddf185a1be194a589424b8713e3b97b762336ec79ff807"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66ec24fe36841636e814b8f90f572a8c0cb0e54d8b5c2d0e300d28a0d7bffec"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6f8ad828f01e8dd32cc58bc28375150171d198491fc901f6f98d2a39ba8e3ff5"}, + 
{file = "ruff-0.1.15-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86811954eec63e9ea162af0ffa9f8d09088bab51b7438e8b6488b9401863c25e"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd4025ac5e87d9b80e1f300207eb2fd099ff8200fa2320d7dc066a3f4622dc6b"}, + {file = "ruff-0.1.15-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17b93c02cdb6aeb696effecea1095ac93f3884a49a554a9afa76bb125c114c1"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ddb87643be40f034e97e97f5bc2ef7ce39de20e34608f3f829db727a93fb82c5"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:abf4822129ed3a5ce54383d5f0e964e7fef74a41e48eb1dfad404151efc130a2"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6c629cf64bacfd136c07c78ac10a54578ec9d1bd2a9d395efbee0935868bf852"}, + {file = "ruff-0.1.15-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:1bab866aafb53da39c2cadfb8e1c4550ac5340bb40300083eb8967ba25481447"}, + {file = "ruff-0.1.15-py3-none-win32.whl", hash = "sha256:2417e1cb6e2068389b07e6fa74c306b2810fe3ee3476d5b8a96616633f40d14f"}, + {file = "ruff-0.1.15-py3-none-win_amd64.whl", hash = "sha256:3837ac73d869efc4182d9036b1405ef4c73d9b1f88da2413875e34e0d6919587"}, + {file = "ruff-0.1.15-py3-none-win_arm64.whl", hash = "sha256:9a933dfb1c14ec7a33cceb1e49ec4a16b51ce3c20fd42663198746efc0427360"}, + {file = "ruff-0.1.15.tar.gz", hash = "sha256:f6dfa8c1b21c913c326919056c390966648b680966febcb796cc9d1aaab8564e"}, ] [[package]] @@ -1053,17 +1054,17 @@ files = [ [[package]] name = "syrupy" -version = "4.6.0" +version = "4.6.1" description = "Pytest Snapshot Test Utility" optional = false python-versions = ">=3.8.1,<4" files = [ - {file = "syrupy-4.6.0-py3-none-any.whl", hash = "sha256:747aae1bcf3cb3249e33b1e6d81097874d23615982d5686ebe637875b0775a1b"}, - {file = "syrupy-4.6.0.tar.gz", hash = "sha256:231b1f5d00f1f85048ba81676c79448076189c4aef4d33f21ae32f3b4c565a54"}, + {file = "syrupy-4.6.1-py3-none-any.whl", hash = "sha256:203e52f9cb9fa749cf683f29bd68f02c16c3bc7e7e5fe8f2fc59bdfe488ce133"}, + {file = "syrupy-4.6.1.tar.gz", hash = "sha256:37a835c9ce7857eeef86d62145885e10b3cb9615bc6abeb4ce404b3f18e1bb36"}, ] [package.dependencies] -pytest = ">=7.0.0,<8.0.0" +pytest = ">=7.0.0,<9.0.0" [[package]] name = "tabulate" @@ -1095,16 +1096,17 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] [[package]] name = "together" -version = "0.2.10" +version = "0.2.11" description = "Python client for Together's Cloud Platform!" 
optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "together-0.2.10-py3-none-any.whl", hash = "sha256:f64d9e6e7a6ca00e509e55d7912786f54a6bb0d8009c761a2dd7c86777d992cf"}, - {file = "together-0.2.10.tar.gz", hash = "sha256:3e08f3207b61cd5aed54232e44976fbfc401bdc62327e16a767962ef14e62979"}, + {file = "together-0.2.11-py3-none-any.whl", hash = "sha256:0760aafe1b53cf41cb58592086a2b5edef956a2536b52daecfc2f9ef0fc736d5"}, + {file = "together-0.2.11.tar.gz", hash = "sha256:906ae626b0ec49f7ea90d5660fb4c28664a1f55719f998e868d035f417316e24"}, ] [package.dependencies] +aiohttp = ">=3.7.4,<4.0.0" pydantic = ">=2.5.0,<3.0.0" requests = ">=2.31.0,<3.0.0" sseclient-py = ">=1.7.2,<2.0.0" @@ -1125,13 +1127,13 @@ files = [ [[package]] name = "tqdm" -version = "4.66.1" +version = "4.66.2" description = "Fast, Extensible Progress Meter" optional = false python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.1-py3-none-any.whl", hash = "sha256:d302b3c5b53d47bce91fea46679d9c3c6508cf6332229aa1e7d8653723793386"}, - {file = "tqdm-4.66.1.tar.gz", hash = "sha256:d88e651f9db8d8551a62556d3cff9e3034274ca5d66e93197cf2490e2dcb69c7"}, + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, ] [package.dependencies] @@ -1166,13 +1168,13 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6. [[package]] name = "types-requests" -version = "2.31.0.20240106" +version = "2.31.0.20240125" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.31.0.20240106.tar.gz", hash = "sha256:0e1c731c17f33618ec58e022b614a1a2ecc25f7dc86800b36ef341380402c612"}, - {file = "types_requests-2.31.0.20240106-py3-none-any.whl", hash = "sha256:da997b3b6a72cc08d09f4dba9802fdbabc89104b35fe24ee588e674037689354"}, + {file = "types-requests-2.31.0.20240125.tar.gz", hash = "sha256:03a28ce1d7cd54199148e043b2079cdded22d6795d19a2c2a6791a4b2b5e2eb5"}, + {file = "types_requests-2.31.0.20240125-py3-none-any.whl", hash = "sha256:9592a9a4cb92d6d75d9b491a41477272b710e021011a2a3061157e2fb1f1a5d1"}, ] [package.dependencies] @@ -1191,54 +1193,57 @@ files = [ [[package]] name = "urllib3" -version = "2.1.0" +version = "2.2.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"},
-    {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"},
+    {file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"},
+    {file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"},
 ]

 [package.extras]
 brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+h2 = ["h2 (>=4,<5)"]
 socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
 zstd = ["zstandard (>=0.18.0)"]

 [[package]]
 name = "watchdog"
-version = "3.0.0"
+version = "4.0.0"
 description = "Filesystem events monitoring"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"},
-    {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"},
-    {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"},
-    {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"},
-    {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"},
-    {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"},
-    {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"},
-    {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"},
-    {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"},
-    {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"},
-    {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"},
-    {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"},
-    {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"},
-    {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"},
-    {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"},
-    {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"},
-    {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"},
-    {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"},
-    {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"},
-    {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"},
-    {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"},
-    {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"},
-    {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"},
-    {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"},
-    {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"},
-    {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"},
-    {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"},
+    {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"},
+    {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"},
+    {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"},
+    {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"},
+    {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"},
+    {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"},
+    {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"},
+    {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"},
+    {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"},
+    {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:45cc09cc4c3b43fb10b59ef4d07318d9a3ecdbff03abd2e36e77b6dd9f9a5c85"},
+    {file = "watchdog-4.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eed82cdf79cd7f0232e2fdc1ad05b06a5e102a43e331f7d041e5f0e0a34a51c4"},
+    {file = "watchdog-4.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ba30a896166f0fee83183cec913298151b73164160d965af2e93a20bbd2ab605"},
+    {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d18d7f18a47de6863cd480734613502904611730f8def45fc52a5d97503e5101"},
+    {file = "watchdog-4.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2895bf0518361a9728773083908801a376743bcc37dfa252b801af8fd281b1ca"},
+    {file = "watchdog-4.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:87e9df830022488e235dd601478c15ad73a0389628588ba0b028cb74eb72fed8"},
+    {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"},
+    {file = "watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"},
+    {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"},
+    {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"},
+    {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"},
+    {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"},
+    {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"},
+    {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"},
+    {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"},
+    {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"},
+    {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"},
+    {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"},
+    {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"},
+    {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"},
 ]

 [package.extras]
@@ -1350,4 +1355,4 @@ multidict = ">=4.0"
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.8.1,<4.0"
-content-hash = "c5961892ffafa51111ae7e15ccae510363d066406706f85648b9e02dc794cf45"
+content-hash = "f4ec4f7bc38cbd2acf749d2f8947ac6b7c80814c5048ce803358306f5b46379d"
diff --git a/libs/partners/together/pyproject.toml b/libs/partners/together/pyproject.toml
index 851383a7bc1bd..a4312f78d3612 100644
--- a/libs/partners/together/pyproject.toml
+++ b/libs/partners/together/pyproject.toml
@@ -12,7 +12,7 @@ license = "MIT"

 [tool.poetry.dependencies]
 python = ">=3.8.1,<4.0"
-langchain-core = ">=0.0.12"
+langchain-core = "^0.1"
 together = "^0.2.10"
 requests = "^2"
 aiohttp = "^3.9.1"
@@ -23,11 +23,11 @@ optional = true
 [tool.poetry.group.test.dependencies]
 pytest = "^7.3.0"
 freezegun = "^1.2.2"
-pytest-mock = "^3.10.0"
+pytest-mock = "^3.10.0"
 syrupy = "^4.0.2"
 pytest-watcher = "^0.3.4"
 pytest-asyncio = "^0.21.1"
-langchain-core = {path = "../../core", develop = true}
+langchain-core = { path = "../../core", develop = true }

 [tool.poetry.group.codespell]
 optional = true
@@ -48,20 +48,20 @@ ruff = "^0.1.5"

 [tool.poetry.group.typing.dependencies]
 mypy = "^0.991"
-langchain-core = {path = "../../core", develop = true}
+langchain-core = { path = "../../core", develop = true }
 types-requests = "^2"

 [tool.poetry.group.dev]
 optional = true

 [tool.poetry.group.dev.dependencies]
-langchain-core = {path = "../../core", develop = true}
+langchain-core = { path = "../../core", develop = true }

 [tool.ruff.lint]
 select = [
-  "E",  # pycodestyle
-  "F",  # pyflakes
-  "I",  # isort
+  "E",    # pycodestyle
+  "F",    # pyflakes
+  "I",    # isort
   "T201", # print
 ]
@@ -69,9 +69,7 @@ select = [
 disallow_untyped_defs = "True"

 [tool.coverage.run]
-omit = [
-    "tests/*",
-]
+omit = ["tests/*"]

 [build-system]
 requires = ["poetry-core>=1.0.0"]
diff --git a/templates/rag-ollama-multi-query/rag_ollama_multi_query/chain.py b/templates/rag-ollama-multi-query/rag_ollama_multi_query/chain.py
index dea1ea5fe53db..1bc81584532b0 100644
--- a/templates/rag-ollama-multi-query/rag_ollama_multi_query/chain.py
+++ b/templates/rag-ollama-multi-query/rag_ollama_multi_query/chain.py
@@ -1,7 +1,3 @@
-from typing import List
-
-from langchain.chains import LLMChain
-from langchain.output_parsers import PydanticOutputParser
 from langchain.retrievers.multi_query import MultiQueryRetriever
 from langchain.text_splitter import RecursiveCharacterTextSplitter
 from langchain_community.chat_models import ChatOllama, ChatOpenAI
@@ -10,7 +6,7 @@
 from langchain_community.vectorstores import Chroma
 from langchain_core.output_parsers import StrOutputParser
 from langchain_core.prompts import ChatPromptTemplate, PromptTemplate
-from langchain_core.pydantic_v1 import BaseModel, Field
+from langchain_core.pydantic_v1 import BaseModel
 from langchain_core.runnables import RunnableParallel, RunnablePassthrough

 # Load
@@ -29,23 +25,6 @@
 )


-# Output parser will split the LLM result into a list of queries
-class LineList(BaseModel):
-    # "lines" is the key (attribute name) of the parsed output
-    lines: List[str] = Field(description="Lines of text")
-
-
-class LineListOutputParser(PydanticOutputParser):
-    def __init__(self) -> None:
-        super().__init__(pydantic_object=LineList)
-
-    def parse(self, text: str) -> LineList:
-        lines = text.strip().split("\n")
-        return LineList(lines=lines)
-
-
-output_parser = LineListOutputParser()
-
 QUERY_PROMPT = PromptTemplate(
     input_variables=["question"],
     template="""You are an AI language model assistant. Your task is to generate five
@@ -60,12 +39,9 @@ def parse(self, text: str) -> LineList:
 ollama_llm = "zephyr"
 llm = ChatOllama(model=ollama_llm)

-# Chain
-llm_chain = LLMChain(llm=llm, prompt=QUERY_PROMPT, output_parser=output_parser)
-
 # Run
-retriever = MultiQueryRetriever(
-    retriever=vectorstore.as_retriever(), llm_chain=llm_chain, parser_key="lines"
+retriever = MultiQueryRetriever.from_llm(
+    vectorstore.as_retriever(), llm, prompt=QUERY_PROMPT
 )  # "lines" is the key (attribute name) of the parsed output

 # RAG prompt
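Editor's note on the final hunk above: `MultiQueryRetriever.from_llm` builds the query-generation chain and the line-splitting output parser internally, which is why the template can drop its hand-rolled `LineList`, `LineListOutputParser`, and `LLMChain` plumbing. A minimal sketch of the new wiring follows; it is not part of the diff, it assumes a local Ollama server with the `zephyr` model pulled and an already-built base retriever, and the helper name `build_multi_query_retriever` is illustrative only.

```python
# Sketch only: mirrors the retriever construction in
# templates/rag-ollama-multi-query/rag_ollama_multi_query/chain.py after this PR.
from langchain.retrievers.multi_query import MultiQueryRetriever
from langchain_community.chat_models import ChatOllama
from langchain_core.retrievers import BaseRetriever


def build_multi_query_retriever(base_retriever: BaseRetriever) -> MultiQueryRetriever:
    """Wrap a base retriever so each question is expanded into several queries."""
    # Assumes `ollama serve` is running locally and `ollama pull zephyr` was run.
    llm = ChatOllama(model="zephyr")
    # from_llm() wires up the query-generation chain and its default line parser;
    # the template additionally passes prompt=QUERY_PROMPT to customize the prompt.
    return MultiQueryRetriever.from_llm(retriever=base_retriever, llm=llm)
```

Passing `prompt=` (as the template does with `QUERY_PROMPT`) overrides the retriever's built-in multi-query prompt; omitting it falls back to the default.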