From ca86f26915371e17fefa7b75dd0dc1b2731e05d8 Mon Sep 17 00:00:00 2001 From: Estelle Scifo Date: Mon, 21 Oct 2024 14:21:54 +0200 Subject: [PATCH] Restructure examples folder (#146) * Structure proposal * Backup old examples in a specific folder (tmp) * WIP: example folder structure refactoring * ruff * Add result formatter example * LLM examples * MistralAILLM example + doc * Simple KG builder example * Embeder examples * Weaviate example * Fix import for cohere embeddings * Format * Update README with links to new files * Move Pinecone examples * Can't remove this file yet - but remove link to this specific file from doc - need to keep the file until the next release but then remove * Pinecone + cleaning * Cleaning 'old' folder * Components examples * Test and harmonize retriever section * Deal with qdrant examples - add custom component * Nicer path definition * Mypy/ruff * Rename answer -> QA + add links * Use pre_filters variable for explicitness * ruff * ruff * Missing files for db operations * Fix openai example * Fix CI * :'( --- docs/source/user_guide_kg_builder.rst | 2 +- docs/source/user_guide_rag.rst | 26 + examples/README.md | 132 ++ .../build_graph/simple_kg_builder_from_pdf.py | 73 + .../simple_kg_builder_from_text.py | 70 + .../answer/custom_prompt.py} | 16 +- .../answer/langchain_compatiblity.py | 46 + .../components/custom_component.py | 66 + .../components/extractors/custom_extractor.py | 36 + .../llm_entity_relation_extractor.py | 33 + ...y_relation_extractor_with_custom_prompt.py | 33 + .../components/loaders/custom_loader.py | 27 + .../components/loaders/pdf_loader.py | 20 + .../components/resolvers/custom_resolver.py | 25 + .../resolvers/simple_entity_resolver.py | 25 + .../simple_entity_resolver_pre_filter.py | 36 + .../components/schema_builders}/schema.py | 0 .../components/splitters/custom_splitter.py | 14 + .../splitters/fixed_size_splitter.py | 14 + .../splitters/langhchain_splitter.py} | 0 .../splitters/llamaindex_splitter.py} | 0 .../components/writers/custom_writer.py | 21 + .../components/writers/neo4j_writer.py | 20 + .../pipeline/kg_builder_from_pdf.py | 0 .../pipeline/kg_builder_from_text.py | 0 .../build_graph}/pipeline/visualization.py | 0 .../embeddings/azure_openai_embeddings.py | 14 + .../customize/embeddings/cohere_embeddings.py | 12 + .../customize/embeddings/custom_embeddings.py | 17 + .../embeddings/mistalai_embeddings.py | 13 + .../customize/embeddings/ollama_embeddings.py | 16 + .../customize/embeddings/openai_embeddings.py | 13 + .../embeddings/vertexai_embeddings.py | 9 + examples/customize/llms/anthropic_llm.py | 14 + examples/customize/llms/cohere_llm.py | 12 + examples/customize/llms/custom_llm.py | 24 + examples/customize/llms/mistalai_llm.py | 12 + examples/customize/llms/openai_llm.py | 9 + examples/customize/llms/vertexai_llm.py | 12 + .../retrievers/external}/pinecone/README.md | 0 .../pinecone/pinecone_text_search.py} | 19 +- .../pinecone/pinecone_vector_search.py} | 9 + .../retrievers/external}/qdrant/README.md | 0 .../external/qdrant/qdrant_text_search.py} | 0 .../external/qdrant/qdrant_vector_search.py} | 3 +- .../retrievers/external}/weaviate/README.md | 0 .../weaviate_text_search_local_embedder.py} | 10 + .../weaviate_text_search_remote_embedder.py} | 10 + .../weaviate/weaviate_vector_search.py} | 12 +- .../hybrid_cypher_search.py | 0 .../hybrid_retrievers}/hybrid_search.py | 0 ...esult_formatter_vector_cypher_retriever.py | 57 + .../result_formatter_vector_retriever.py | 151 ++ .../customize/retrievers/use_pre_filters.py | 29 + 
...ter and the Chamber of Secrets Summary.pdf | Bin ...y Potter and the Death Hallows Summary.pdf | Bin examples/data/embedding_avatar.py | 1539 +++++++++++++++++ examples/{ => data}/embedding_biology.py | 0 .../create_fulltext_index.py | 12 + .../create_vector_index.py | 18 + .../populate_vector_index.py | 13 + examples/graphrag_with_langchain_llm.py | 58 - examples/graphrag_with_mistral.py | 68 - examples/{pipeline => }/kg_builder.py | 43 +- examples/{ => old}/pipeline/__init__.py | 0 .../{ => old}/pipeline/kg_builder_example.py | 0 ...builder_two_documents_entity_resolution.py | 0 examples/openai_search.py | 51 - examples/pipeline/rag.py | 120 -- examples/qdrant/__init__.py | 0 examples/{ => question_answering}/graphrag.py | 0 examples/retrieve/hybrid_cypher_retriever.py | 50 + examples/retrieve/hybrid_retriever.py | 44 + .../retrieve/similarity_search_for_text.py | 38 + .../retrieve/similarity_search_for_vector.py | 29 + examples/retrieve/text2cypher_search.py | 54 + examples/retrieve/vector_cypher_retriever.py | 47 + examples/similarity_search_for_text.py | 57 - .../similarity_search_for_text_mistral.py | 50 - examples/similarity_search_for_vector.py | 45 - examples/text2cypher_search.py | 46 - examples/vector_cypher_retrieval.py | 68 - examples/vector_search_with_filters.py | 73 - examples/weaviate/__init__.py | 0 poetry.lock | 449 ++--- pyproject.toml | 1 + src/neo4j_graphrag/embeddings/__init__.py | 2 + 87 files changed, 3299 insertions(+), 888 deletions(-) create mode 100644 examples/README.md create mode 100644 examples/build_graph/simple_kg_builder_from_pdf.py create mode 100644 examples/build_graph/simple_kg_builder_from_text.py rename examples/{graphrag_custom_prompt.py => customize/answer/custom_prompt.py} (75%) create mode 100644 examples/customize/answer/langchain_compatiblity.py create mode 100644 examples/customize/build_graph/components/custom_component.py create mode 100644 examples/customize/build_graph/components/extractors/custom_extractor.py create mode 100644 examples/customize/build_graph/components/extractors/llm_entity_relation_extractor.py create mode 100644 examples/customize/build_graph/components/extractors/llm_entity_relation_extractor_with_custom_prompt.py create mode 100644 examples/customize/build_graph/components/loaders/custom_loader.py create mode 100644 examples/customize/build_graph/components/loaders/pdf_loader.py create mode 100644 examples/customize/build_graph/components/resolvers/custom_resolver.py create mode 100644 examples/customize/build_graph/components/resolvers/simple_entity_resolver.py create mode 100644 examples/customize/build_graph/components/resolvers/simple_entity_resolver_pre_filter.py rename examples/{pipeline => customize/build_graph/components/schema_builders}/schema.py (100%) create mode 100644 examples/customize/build_graph/components/splitters/custom_splitter.py create mode 100644 examples/customize/build_graph/components/splitters/fixed_size_splitter.py rename examples/{__init__.py => customize/build_graph/components/splitters/langhchain_splitter.py} (100%) rename examples/{pinecone/__init__.py => customize/build_graph/components/splitters/llamaindex_splitter.py} (100%) create mode 100644 examples/customize/build_graph/components/writers/custom_writer.py create mode 100644 examples/customize/build_graph/components/writers/neo4j_writer.py rename examples/{ => customize/build_graph}/pipeline/kg_builder_from_pdf.py (100%) rename examples/{ => customize/build_graph}/pipeline/kg_builder_from_text.py (100%) rename examples/{ => 
customize/build_graph}/pipeline/visualization.py (100%) create mode 100644 examples/customize/embeddings/azure_openai_embeddings.py create mode 100644 examples/customize/embeddings/cohere_embeddings.py create mode 100644 examples/customize/embeddings/custom_embeddings.py create mode 100644 examples/customize/embeddings/mistalai_embeddings.py create mode 100644 examples/customize/embeddings/ollama_embeddings.py create mode 100644 examples/customize/embeddings/openai_embeddings.py create mode 100644 examples/customize/embeddings/vertexai_embeddings.py create mode 100644 examples/customize/llms/anthropic_llm.py create mode 100644 examples/customize/llms/cohere_llm.py create mode 100644 examples/customize/llms/custom_llm.py create mode 100644 examples/customize/llms/mistalai_llm.py create mode 100644 examples/customize/llms/openai_llm.py create mode 100644 examples/customize/llms/vertexai_llm.py rename examples/{ => customize/retrievers/external}/pinecone/README.md (100%) rename examples/{pinecone/vector_search.py => customize/retrievers/external/pinecone/pinecone_text_search.py} (50%) rename examples/{pinecone/text_search.py => customize/retrievers/external/pinecone/pinecone_vector_search.py} (74%) rename examples/{ => customize/retrievers/external}/qdrant/README.md (100%) rename examples/{qdrant/text_search.py => customize/retrievers/external/qdrant/qdrant_text_search.py} (100%) rename examples/{qdrant/vector_search.py => customize/retrievers/external/qdrant/qdrant_vector_search.py} (92%) rename examples/{ => customize/retrievers/external}/weaviate/README.md (100%) rename examples/{weaviate/text_search_local_embedder.py => customize/retrievers/external/weaviate/weaviate_text_search_local_embedder.py} (74%) rename examples/{weaviate/text_search_remote_embedder.py => customize/retrievers/external/weaviate/weaviate_text_search_remote_embedder.py} (72%) rename examples/{weaviate/vector_search.py => customize/retrievers/external/weaviate/weaviate_vector_search.py} (68%) rename examples/{ => customize/retrievers/hybrid_retrievers}/hybrid_cypher_search.py (100%) rename examples/{ => customize/retrievers/hybrid_retrievers}/hybrid_search.py (100%) create mode 100644 examples/customize/retrievers/result_formatter_vector_cypher_retriever.py create mode 100644 examples/customize/retrievers/result_formatter_vector_retriever.py create mode 100644 examples/customize/retrievers/use_pre_filters.py rename examples/{pipeline => data}/Harry Potter and the Chamber of Secrets Summary.pdf (100%) rename examples/{pipeline => data}/Harry Potter and the Death Hallows Summary.pdf (100%) create mode 100644 examples/data/embedding_avatar.py rename examples/{ => data}/embedding_biology.py (100%) create mode 100644 examples/database_operations/create_fulltext_index.py create mode 100644 examples/database_operations/create_vector_index.py create mode 100644 examples/database_operations/populate_vector_index.py delete mode 100644 examples/graphrag_with_langchain_llm.py delete mode 100644 examples/graphrag_with_mistral.py rename examples/{pipeline => }/kg_builder.py (82%) rename examples/{ => old}/pipeline/__init__.py (100%) rename examples/{ => old}/pipeline/kg_builder_example.py (100%) rename examples/{ => old}/pipeline/kg_builder_two_documents_entity_resolution.py (100%) delete mode 100644 examples/openai_search.py delete mode 100644 examples/pipeline/rag.py delete mode 100644 examples/qdrant/__init__.py rename examples/{ => question_answering}/graphrag.py (100%) create mode 100644 
examples/retrieve/hybrid_cypher_retriever.py create mode 100644 examples/retrieve/hybrid_retriever.py create mode 100644 examples/retrieve/similarity_search_for_text.py create mode 100644 examples/retrieve/similarity_search_for_vector.py create mode 100644 examples/retrieve/text2cypher_search.py create mode 100644 examples/retrieve/vector_cypher_retriever.py delete mode 100644 examples/similarity_search_for_text.py delete mode 100644 examples/similarity_search_for_text_mistral.py delete mode 100644 examples/similarity_search_for_vector.py delete mode 100644 examples/text2cypher_search.py delete mode 100644 examples/vector_cypher_retrieval.py delete mode 100644 examples/vector_search_with_filters.py delete mode 100644 examples/weaviate/__init__.py diff --git a/docs/source/user_guide_kg_builder.rst b/docs/source/user_guide_kg_builder.rst index c2e0a609..f5aeeefd 100644 --- a/docs/source/user_guide_kg_builder.rst +++ b/docs/source/user_guide_kg_builder.rst @@ -33,7 +33,7 @@ A Knowledge Graph (KG) construction pipeline requires a few components: This package contains the interface and implementations for each of these components, which are detailed in the following sections. To see an end-to-end example of a Knowledge Graph construction pipeline, -refer to `this example `_. +refer to the `example folder `_ in the project GitHub repository. ********************************** Knowledge Graph Builder Components diff --git a/docs/source/user_guide_rag.rst b/docs/source/user_guide_rag.rst index 84585bac..0f13d758 100644 --- a/docs/source/user_guide_rag.rst +++ b/docs/source/user_guide_rag.rst @@ -78,6 +78,7 @@ If OpenAI cannot be used directly, there are a few available alternatives: - Use Azure OpenAI (GPT...). - Use Google VertexAI (Gemini...). - Use Anthropic LLM (Claude...). +- Use MistralAI LLM. - Use Cohere. - Use a local Ollama model. - Implement a custom interface. @@ -164,6 +165,31 @@ To use Anthropic, instantiate the `AnthropicLLM` class: See :ref:`anthropicllm`. +Using MistralAI LLM +------------------- + +To use MistralAI, instantiate the `MistralAILLM` class: + +.. code:: python + + from neo4j_graphrag.llm import MistralAILLM + + llm = MistralAILLM( + model_name="mistral-small-latest", + api_key=api_key, # can also set `MISTRAL_API_KEY` in env vars + ) + llm.invoke("say something") + + +.. note:: + + In order to run this code, the `mistralai` Python package needs to be installed: + `pip install mistralai` + +See :ref:`mistralaillm`. + + + Using Cohere LLM ---------------- diff --git a/examples/README.md b/examples/README.md new file mode 100644 index 00000000..b4df46ad --- /dev/null +++ b/examples/README.md @@ -0,0 +1,132 @@ +# Examples Index + +This folder contains example usage for the different features +supported by the `neo4j-graphrag` package: + +- [Build Knowledge Graph](#build-knowledge-graph) from PDF or text +- [Retrieve](#retrieve) information from the graph +- [Question Answering](#answer-graphrag) (Q&A) + +Each of these steps has many customization options which +are listed in [the last section of this file](#customize).
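+
+Below is a rough, condensed sketch of how these three steps fit together. It assumes a local
+Neo4j instance, `OPENAI_API_KEY` set in the environment, and an already existing vector index
+(the index name and sample text below are placeholders); see the linked examples for complete,
+runnable versions:
+
+```python
+import asyncio
+
+import neo4j
+from neo4j_graphrag.embeddings import OpenAIEmbeddings
+from neo4j_graphrag.experimental.pipeline.kg_builder import SimpleKGPipeline
+from neo4j_graphrag.generation import GraphRAG
+from neo4j_graphrag.llm import OpenAILLM
+from neo4j_graphrag.retrievers import VectorRetriever
+
+driver = neo4j.GraphDatabase.driver("neo4j://localhost:7687", auth=("neo4j", "password"))
+llm = OpenAILLM(model_name="gpt-4o", model_params={"response_format": {"type": "json_object"}})
+embedder = OpenAIEmbeddings()
+
+# 1. Build: extract entities and relations from text and write them to Neo4j
+kg_builder = SimpleKGPipeline(llm=llm, driver=driver, embedder=embedder, from_pdf=False)
+asyncio.run(kg_builder.run_async(text="Paul Atreides is the heir of House Atreides."))
+
+# 2. Retrieve: vector similarity search over an existing index
+# ("my-vector-index" is a placeholder for an index created beforehand)
+retriever = VectorRetriever(driver, index_name="my-vector-index", embedder=embedder)
+
+# 3. Answer: combine the retriever and the LLM with GraphRAG
+rag = GraphRAG(retriever=retriever, llm=llm)
+print(rag.search("Who is Paul Atreides?").answer)
+
+driver.close()
+```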
+ +## Build Knowledge Graph + +- [End to end PDF to graph simple pipeline](build_graph/simple_kg_builder_from_pdf.py) +- [End to end text to graph simple pipeline](build_graph/simple_kg_builder_from_text.py) + + +## Retrieve + +- [Retriever from an embedding vector](retrieve/similarity_search_for_vector.py) +- [Retriever from a text](retrieve/similarity_search_for_text.py) +- [Graph-based retrieval with VectorCypherRetriever](retrieve/vector_cypher_retriever.py) +- [Hybrid retriever](./retrieve/hybrid_retriever.py) +- [Hybrid Cypher retriever](./retrieve/hybrid_cypher_retriever.py) +- [Text2Cypher retriever](./retrieve/text2cypher_search.py) + + +### External Retrievers + +#### Weaviate + +- [Vector search](customize/retrievers/external/weaviate/weaviate_vector_search.py) +- [Text search with local embedder](customize/retrievers/external/weaviate/weaviate_text_search_local_embedder.py) +- [Text search with remote embedder](customize/retrievers/external/weaviate/weaviate_text_search_remote_embedder.py) + +#### Pinecone + +- [Vector search](./customize/retrievers/external/pinecone/pinecone_vector_search.py) +- [Text search](./customize/retrievers/external/pinecone/pinecone_text_search.py) + + +#### Qdrant + +- [Vector search](./customize/retrievers/external/qdrant/qdrant_vector_search.py) +- [Text search](./customize/retrievers/external/qdrant/qdrant_text_search.py) + + +## Answer: GraphRAG + +- [End to end GraphRAG](./answer/graphrag.py) + + +## Customize + +### Retriever + +- [Control result format for VectorRetriever](customize/retrievers/result_formatter_vector_retriever.py) +- [Control result format for VectorCypherRetriever](customize/retrievers/result_formatter_vector_cypher_retriever.py) + + +### LLMs + +- [OpenAI (GPT)](./customize/llms/openai_llm.py) +- [Azure OpenAI]() +- [VertexAI (Gemini)](./customize/llms/vertexai_llm.py) +- [MistralAI](./customize/llms/mistalai_llm.py) +- [Cohere](./customize/llms/cohere_llm.py) +- [Anthropic (Claude)](./customize/llms/anthropic_llm.py) +- [Ollama]() +- [Custom LLM](./customize/llms/custom_llm.py) + + +### Prompts + +- [Using a custom prompt](old/graphrag_custom_prompt.py) + + +### Embedders + +- [OpenAI](./customize/embeddings/openai_embeddings.py) +- [Azure OpenAI](./customize/embeddings/azure_openai_embeddings.py) +- [VertexAI](./customize/embeddings/vertexai_embeddings.py) +- [MistralAI](./customize/embeddings/mistalai_embeddings.py) +- [Cohere](./customize/embeddings/cohere_embeddings.py) +- [Ollama](./customize/embeddings/ollama_embeddings.py) +- [Custom embedder](./customize/embeddings/custom_embeddings.py) + + +### KG Construction - Pipeline + +- [End to end example with explicit components and text input](./customize/build_graph/pipeline/kg_builder_from_text.py) +- [End to end example with explicit components and PDF input](./customize/build_graph/pipeline/kg_builder_from_pdf.py) + +#### Components + +- Loaders: + - [Load PDF file](./customize/build_graph/components/loaders/pdf_loader.py) + - [Custom](./customize/build_graph/components/loaders/custom_loader.py) +- Text Splitter: + - [Fixed size splitter](./customize/build_graph/components/splitters/fixed_size_splitter.py) + - [Splitter from LangChain](./customize/build_graph/components/splitters/langhchain_splitter.py) + - [Splitter from LlamaIndex](./customize/build_graph/components/splitters/llamaindex_splitter.py) + - [Custom](./customize/build_graph/components/splitters/custom_splitter.py) + - [Chunk embedder]() +- Schema Builder: + - 
[User-defined](./customize/build_graph/components/schema_builders/schema.py) +- Entity Relation Extractor: + - [LLM-based](./customize/build_graph/components/extractors/llm_entity_relation_extractor.py) + - [LLM-based with custom prompt](./customize/build_graph/components/extractors/llm_entity_relation_extractor_with_custom_prompt.py) + - [Custom](./customize/build_graph/components/extractors/custom_extractor.py) +- Knowledge Graph Writer: + - [Neo4j writer](./customize/build_graph/components/writers/neo4j_writer.py) + - [Custom](./customize/build_graph/components/writers/custom_writer.py) +- Entity Resolver: + - [SinglePropertyExactMatchResolver](./customize/build_graph/components/resolvers/simple_entity_resolver.py) + - [SinglePropertyExactMatchResolver with pre-filter](./customize/build_graph/components/resolvers/simple_entity_resolver_pre_filter.py) + - [Custom resolver](./customize/build_graph/components/resolvers/custom_resolver.py) +- [Custom component](./customize/build_graph/components/custom_component.py) + + +### Answer: GraphRAG + +- [LangChain compatibility](./customize/answer/langchain_compatiblity.py) +- [Use a custom prompt](./customize/answer/custom_prompt.py) + + +## Database Operations + +- [Create vector index](database_operations/create_vector_index.py) +- [Create full text index](database_operations/create_fulltext_index.py) +- [Populate vector index](database_operations/populate_vector_index.py) diff --git a/examples/build_graph/simple_kg_builder_from_pdf.py b/examples/build_graph/simple_kg_builder_from_pdf.py new file mode 100644 index 00000000..7b33b256 --- /dev/null +++ b/examples/build_graph/simple_kg_builder_from_pdf.py @@ -0,0 +1,73 @@ +"""This example illustrates how to get started easily with the SimpleKGPipeline +and ingest a PDF into a Neo4j Knowledge Graph. + +This example assumes a Neo4j db is up and running. Update the credentials below +if needed. + +OPENAI_API_KEY needs to be in the env vars. +""" + +import asyncio +from pathlib import Path + +import neo4j +from neo4j_graphrag.embeddings import OpenAIEmbeddings +from neo4j_graphrag.experimental.pipeline.kg_builder import SimpleKGPipeline +from neo4j_graphrag.experimental.pipeline.pipeline import PipelineResult +from neo4j_graphrag.llm import LLMInterface +from neo4j_graphrag.llm.openai_llm import OpenAILLM + +# Neo4j db infos +URI = "neo4j://localhost:7687" +AUTH = ("neo4j", "password") +DATABASE = "neo4j" + + +# this file lives in examples/build_graph/, the PDF in examples/data/ +root_dir = Path(__file__).parents[1] +file_path = root_dir / "data" / "Harry Potter and the Chamber of Secrets Summary.pdf" + + +# Instantiate Entity and Relation objects. This defines the +# entities and relations the LLM will be looking for in the text.
+ENTITIES = ["Person", "Organization", "Location"] +RELATIONS = ["SITUATED_AT", "INTERACTS", "LED_BY"] +POTENTIAL_SCHEMA = [ + ("Person", "SITUATED_AT", "Location"), + ("Person", "INTERACTS", "Person"), + ("Organization", "LED_BY", "Person"), +] + + +async def define_and_run_pipeline( + neo4j_driver: neo4j.Driver, + llm: LLMInterface, +) -> PipelineResult: + # Create an instance of the SimpleKGPipeline + kg_builder = SimpleKGPipeline( + llm=llm, + driver=neo4j_driver, + embedder=OpenAIEmbeddings(), + entities=ENTITIES, + relations=RELATIONS, + potential_schema=POTENTIAL_SCHEMA, + ) + return await kg_builder.run_async(file_path=str(file_path)) + + +async def main() -> PipelineResult: + llm = OpenAILLM( + model_name="gpt-4o", + model_params={ + "max_tokens": 2000, + "response_format": {"type": "json_object"}, + }, + ) + with neo4j.GraphDatabase.driver(URI, auth=AUTH, database=DATABASE) as driver: + res = await define_and_run_pipeline(driver, llm) + await llm.async_client.close() + return res + + +if __name__ == "__main__": + res = asyncio.run(main()) + print(res) diff --git a/examples/build_graph/simple_kg_builder_from_text.py b/examples/build_graph/simple_kg_builder_from_text.py new file mode 100644 index 00000000..ced91ae9 --- /dev/null +++ b/examples/build_graph/simple_kg_builder_from_text.py @@ -0,0 +1,70 @@ +"""This example illustrates how to get started easily with the SimpleKGPipeline +and ingest text into a Neo4j Knowledge Graph. + +This example assumes a Neo4j db is up and running. Update the credentials below +if needed. +""" + +import asyncio + +import neo4j +from neo4j_graphrag.embeddings import OpenAIEmbeddings +from neo4j_graphrag.experimental.pipeline.kg_builder import SimpleKGPipeline +from neo4j_graphrag.experimental.pipeline.pipeline import PipelineResult +from neo4j_graphrag.llm import LLMInterface +from neo4j_graphrag.llm.openai_llm import OpenAILLM + +# Neo4j db infos +URI = "neo4j://localhost:7687" +AUTH = ("neo4j", "password") +DATABASE = "neo4j" + +# Text to process +TEXT = """The son of Duke Leto Atreides and the Lady Jessica, Paul is the heir of House Atreides, +an aristocratic family that rules the planet Caladan.""" + +# Instantiate Entity and Relation objects. This defines the +# entities and relations the LLM will be looking for in the text. 
+ENTITIES = ["Person", "House", "Planet"] +RELATIONS = ["PARENT_OF", "HEIR_OF", "RULES"] +POTENTIAL_SCHEMA = [ + ("Person", "PARENT_OF", "Person"), + ("Person", "HEIR_OF", "House"), + ("House", "RULES", "Planet"), +] + + +async def define_and_run_pipeline( + neo4j_driver: neo4j.Driver, + llm: LLMInterface, +) -> PipelineResult: + # Create an instance of the SimpleKGPipeline + kg_builder = SimpleKGPipeline( + llm=llm, + driver=neo4j_driver, + embedder=OpenAIEmbeddings(), + entities=ENTITIES, + relations=RELATIONS, + potential_schema=POTENTIAL_SCHEMA, + from_pdf=False, + ) + return await kg_builder.run_async(text=TEXT) + + +async def main() -> PipelineResult: + llm = OpenAILLM( + model_name="gpt-4o", + model_params={ + "max_tokens": 2000, + "response_format": {"type": "json_object"}, + }, + ) + with neo4j.GraphDatabase.driver(URI, auth=AUTH, database=DATABASE) as driver: + res = await define_and_run_pipeline(driver, llm) + await llm.async_client.close() + return res + + +if __name__ == "__main__": + res = asyncio.run(main()) + print(res) diff --git a/examples/graphrag_custom_prompt.py b/examples/customize/answer/custom_prompt.py similarity index 75% rename from examples/graphrag_custom_prompt.py rename to examples/customize/answer/custom_prompt.py index 2625ef43..f67cf33e 100644 --- a/examples/graphrag_custom_prompt.py +++ b/examples/customize/answer/custom_prompt.py @@ -8,14 +8,11 @@ - Logging configuration """ -import logging - import neo4j from neo4j_graphrag.embeddings.openai import OpenAIEmbeddings from neo4j_graphrag.generation import GraphRAG, RagTemplate from neo4j_graphrag.llm import OpenAILLM from neo4j_graphrag.retrievers import VectorCypherRetriever -from neo4j_graphrag.types import RetrieverResultItem URI = "neo4j://localhost:7687" AUTH = ("neo4j", "password") @@ -23,16 +20,6 @@ INDEX = "moviePlotsEmbedding" -# setup logger config -logger = logging.getLogger("neo4j_graphrag") -logging.basicConfig(format="%(asctime)s - %(message)s") -logger.setLevel(logging.DEBUG) - - -def formatter(record: neo4j.Record) -> RetrieverResultItem: - return RetrieverResultItem(content=f'{record.get("title")}: {record.get("plot")}') - - driver = neo4j.GraphDatabase.driver( URI, auth=AUTH, @@ -44,8 +31,7 @@ def formatter(record: neo4j.Record) -> RetrieverResultItem: retriever = VectorCypherRetriever( driver, index_name=INDEX, - retrieval_query="with node, score return node.title as title, node.plot as plot", - result_formatter=formatter, + retrieval_query="WITH node, score RETURN node.title as title, node.plot as plot", embedder=embedder, ) diff --git a/examples/customize/answer/langchain_compatiblity.py b/examples/customize/answer/langchain_compatiblity.py new file mode 100644 index 00000000..9c8b0e06 --- /dev/null +++ b/examples/customize/answer/langchain_compatiblity.py @@ -0,0 +1,46 @@ +"""The LLM interface is compatible with LangChain chat API, + so any LangChain implementation can be used. Same for embedders. + For instance, in GraphRAG: + +Requires OPENAI_API_KEY to be in the env var. 
+""" + +import neo4j +from langchain_openai.chat_models import ChatOpenAI +from langchain_openai.embeddings import OpenAIEmbeddings +from neo4j_graphrag.generation import GraphRAG +from neo4j_graphrag.retrievers import VectorCypherRetriever + +# Define database credentials +URI = "neo4j+s://demo.neo4jlabs.com" +AUTH = ("recommendations", "recommendations") +DATABASE = "recommendations" +INDEX = "moviePlotsEmbedding" + + +driver = neo4j.GraphDatabase.driver( + URI, + auth=AUTH, + database=DATABASE, +) + +embedder = OpenAIEmbeddings(model="text-embedding-ada-002") + +retriever = VectorCypherRetriever( + driver, + index_name=INDEX, + retrieval_query="WITH node, score RETURN node.title as title, node.plot as plot", + embedder=embedder, # type: ignore +) + +llm = ChatOpenAI(model="gpt-4o", temperature=0) + +rag = GraphRAG( + retriever=retriever, + llm=llm, # type: ignore +) + +result = rag.search("Tell me more about Avatar movies") +print(result.answer) + +driver.close() diff --git a/examples/customize/build_graph/components/custom_component.py b/examples/customize/build_graph/components/custom_component.py new file mode 100644 index 00000000..ea71ce4d --- /dev/null +++ b/examples/customize/build_graph/components/custom_component.py @@ -0,0 +1,66 @@ +"""This examples shows how to create a custom component +that can be added to a Pipeline with: + +c = MyComponent(min_value=0, max_value=10) +pipe = Pipeline() +pipe.add_component(c, name="my_component") +""" + +import random + +from neo4j_graphrag.experimental.pipeline import Component, DataModel +from pydantic import BaseModel, validate_call + + +class ComponentInputModel(BaseModel): + """A class to model the component inputs. + This is not required, inputs can also be passed individually. + + Note: can also inherit from DataModel. + """ + + text: str + + +class ComponentResultModel(DataModel): + """A class to model the component outputs. + Each component must have such a description of the output, + so that the parameter mapping can be validated before the + pipeline run starts. 
+ """ + + value: int + text: str + + +class MyComponent(Component): + """Multiplies an input text by a random number + between `min_value` and `max_value` + """ + + def __init__(self, min_value: int, max_value: int) -> None: + self.min_value = min_value + self.max_value = max_value + + # this decorator is required when a Pydantic model is used in the inputs + @validate_call + async def run(self, inputs: ComponentInputModel) -> ComponentResultModel: + # logic here + random_value = random.randint(self.min_value, self.max_value) + return ComponentResultModel( + value=random_value, + text=inputs.text * random_value, + ) + + +if __name__ == "__main__": + import asyncio + + c = MyComponent(min_value=0, max_value=10) + print( + asyncio.run( + c.run( + inputs={"text": "Hello"} # type: ignore + ) + ) + ) diff --git a/examples/customize/build_graph/components/extractors/custom_extractor.py b/examples/customize/build_graph/components/extractors/custom_extractor.py new file mode 100644 index 00000000..c1bb7f52 --- /dev/null +++ b/examples/customize/build_graph/components/extractors/custom_extractor.py @@ -0,0 +1,36 @@ +from typing import Any, Optional + +from neo4j_graphrag.experimental.components.entity_relation_extractor import ( + EntityRelationExtractor, + OnError, +) +from neo4j_graphrag.experimental.components.pdf_loader import DocumentInfo +from neo4j_graphrag.experimental.components.types import Neo4jGraph, TextChunks + + +class MyExtractor(EntityRelationExtractor): + def __init__( + self, + *args: Any, + on_error: OnError = OnError.IGNORE, + create_lexical_graph: bool = True, + **kwargs: Any, + ) -> None: + super().__init__( + *args, + on_error=on_error, + create_lexical_graph=create_lexical_graph, + **kwargs, + ) + + async def run( + self, + chunks: TextChunks, + document_info: Optional[DocumentInfo] = None, + **kwargs: Any, + ) -> Neo4jGraph: + # Implement your logic here + # you can loop over all text chunks with: + for chunk in chunks.chunks: + pass + return Neo4jGraph(nodes=[], relationships=[]) diff --git a/examples/customize/build_graph/components/extractors/llm_entity_relation_extractor.py b/examples/customize/build_graph/components/extractors/llm_entity_relation_extractor.py new file mode 100644 index 00000000..71892f67 --- /dev/null +++ b/examples/customize/build_graph/components/extractors/llm_entity_relation_extractor.py @@ -0,0 +1,33 @@ +from neo4j_graphrag.experimental.components.entity_relation_extractor import ( + LLMEntityRelationExtractor, +) +from neo4j_graphrag.experimental.components.types import ( + Neo4jGraph, + TextChunk, + TextChunks, +) +from neo4j_graphrag.llm import LLMInterface + + +async def main(llm: LLMInterface) -> Neo4jGraph: + """ + + Args: + llm (LLMInterface): Any LLM implemented in neo4j_graphrag.llm or from LangChain chat models. 
+ """ + extractor = LLMEntityRelationExtractor( + llm=llm, + # optional: customize the prompt used for entity and relation extraction + # prompt_template="", + # optional: disable the creation of the lexical graph (Document and Chunk nodes) + # create_lexical_graph=False, + # optional: if an LLM error happens, ignore the chunk and continue process with the next ones + # default value is OnError.RAISE which will end the process + # on_error=OnError.IGNORE, + # optional: tune the max_concurrency parameter to optimize speed + # max_concurrency=5, + ) + graph = await extractor.run( + chunks=TextChunks(chunks=[TextChunk(text="....", index=0)]) + ) + return graph diff --git a/examples/customize/build_graph/components/extractors/llm_entity_relation_extractor_with_custom_prompt.py b/examples/customize/build_graph/components/extractors/llm_entity_relation_extractor_with_custom_prompt.py new file mode 100644 index 00000000..71892f67 --- /dev/null +++ b/examples/customize/build_graph/components/extractors/llm_entity_relation_extractor_with_custom_prompt.py @@ -0,0 +1,33 @@ +from neo4j_graphrag.experimental.components.entity_relation_extractor import ( + LLMEntityRelationExtractor, +) +from neo4j_graphrag.experimental.components.types import ( + Neo4jGraph, + TextChunk, + TextChunks, +) +from neo4j_graphrag.llm import LLMInterface + + +async def main(llm: LLMInterface) -> Neo4jGraph: + """ + + Args: + llm (LLMInterface): Any LLM implemented in neo4j_graphrag.llm or from LangChain chat models. + """ + extractor = LLMEntityRelationExtractor( + llm=llm, + # optional: customize the prompt used for entity and relation extraction + # prompt_template="", + # optional: disable the creation of the lexical graph (Document and Chunk nodes) + # create_lexical_graph=False, + # optional: if an LLM error happens, ignore the chunk and continue process with the next ones + # default value is OnError.RAISE which will end the process + # on_error=OnError.IGNORE, + # optional: tune the max_concurrency parameter to optimize speed + # max_concurrency=5, + ) + graph = await extractor.run( + chunks=TextChunks(chunks=[TextChunk(text="....", index=0)]) + ) + return graph diff --git a/examples/customize/build_graph/components/loaders/custom_loader.py b/examples/customize/build_graph/components/loaders/custom_loader.py new file mode 100644 index 00000000..35ebfd10 --- /dev/null +++ b/examples/customize/build_graph/components/loaders/custom_loader.py @@ -0,0 +1,27 @@ +"""Create a custom data loader to transform content into text.""" + +from pathlib import Path +from typing import Dict, Optional + +from neo4j_graphrag.experimental.components.pdf_loader import ( + DataLoader, + DocumentInfo, + PdfDocument, +) + + +class MyLoader(DataLoader): + async def run( + self, + filepath: Path, + metadata: Optional[Dict[str, str]] = None, + ) -> PdfDocument: + # Implement logic here + return PdfDocument( + text="", + document_info=DocumentInfo( + path=str(filepath), + # optionally, add some metadata as a dict + metadata=None, + ), + ) diff --git a/examples/customize/build_graph/components/loaders/pdf_loader.py b/examples/customize/build_graph/components/loaders/pdf_loader.py new file mode 100644 index 00000000..476eb221 --- /dev/null +++ b/examples/customize/build_graph/components/loaders/pdf_loader.py @@ -0,0 +1,20 @@ +"""Use the PdfLoader component to extract text from a PDF file.""" + +import asyncio +from pathlib import Path + +from neo4j_graphrag.experimental.components.pdf_loader import PdfLoader + +root_dir = Path(__file__).parents[4] 
+file_path = root_dir / "data" / "Harry Potter and the Chamber of Secrets Summary.pdf" + + +async def main() -> None: + loader = PdfLoader() + document = await loader.run(filepath=file_path) + print(document.text[:200]) + print(document.document_info) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/examples/customize/build_graph/components/resolvers/custom_resolver.py b/examples/customize/build_graph/components/resolvers/custom_resolver.py new file mode 100644 index 00000000..b7f1bf4f --- /dev/null +++ b/examples/customize/build_graph/components/resolvers/custom_resolver.py @@ -0,0 +1,25 @@ +"""The base EntityResolver class does not enforce +a specific signature for the run method, which makes it very flexible. +""" + +from typing import Any, Optional, Union + +import neo4j +from neo4j_graphrag.experimental.components.resolver import EntityResolver +from neo4j_graphrag.experimental.components.types import ResolutionStats + + +class MyEntityResolver(EntityResolver): + def __init__( + self, + driver: Union[neo4j.Driver, neo4j.AsyncDriver], + filter_query: Optional[str] = None, + ) -> None: + super().__init__(driver, filter_query) + + async def run(self, *args: Any, **kwargs: Any) -> ResolutionStats: + # logic here + return ResolutionStats( + number_of_nodes_to_resolve=0, + number_of_created_nodes=0, + ) diff --git a/examples/customize/build_graph/components/resolvers/simple_entity_resolver.py b/examples/customize/build_graph/components/resolvers/simple_entity_resolver.py new file mode 100644 index 00000000..49a4da39 --- /dev/null +++ b/examples/customize/build_graph/components/resolvers/simple_entity_resolver.py @@ -0,0 +1,25 @@ +"""The SinglePropertyExactMatchResolver merge nodes with same label +and exact same property value (by default using the "name" property). + +WARNING: this process is destructive, initial nodes are deleted and replaced +by the resolved ones, but all relationships are kept. +See apoc.refactor.mergeNodes documentation for more details. +""" + +import neo4j +from neo4j_graphrag.experimental.components.resolver import ( + SinglePropertyExactMatchResolver, +) +from neo4j_graphrag.experimental.components.types import ResolutionStats + + +async def main(driver: neo4j.Driver) -> None: + resolver = SinglePropertyExactMatchResolver( + driver, + # optionally, change the property used for resolution (default is "name") + # resolve_property="name", + # and the neo4j database where data is updated + # neo4j_database="neo4j", + ) + res: ResolutionStats = await resolver.run() + print(res) diff --git a/examples/customize/build_graph/components/resolvers/simple_entity_resolver_pre_filter.py b/examples/customize/build_graph/components/resolvers/simple_entity_resolver_pre_filter.py new file mode 100644 index 00000000..d9f8cbab --- /dev/null +++ b/examples/customize/build_graph/components/resolvers/simple_entity_resolver_pre_filter.py @@ -0,0 +1,36 @@ +"""The SinglePropertyExactMatchResolver merge nodes with same label +and exact same property value (by default using the "name" property). + +If some nodes need to be excluded from the resolution, for instance nodes +created from a previous run, a "WHERE" query can be added. The only variable +in the query scope is "entity". + +WARNING: this process is destructive, initial nodes are deleted and replaced +by the resolved ones, but all relationships are kept. +See apoc.refactor.mergeNodes documentation for more details. 
+""" + +import neo4j +from neo4j_graphrag.experimental.components.resolver import ( + SinglePropertyExactMatchResolver, +) +from neo4j_graphrag.experimental.components.types import ResolutionStats + + +async def main(driver: neo4j.Driver) -> None: + resolver = SinglePropertyExactMatchResolver( + driver, + # let's filter out some entities assuming the EntityToExclude label + # was manually added to nodes in the db + filter_query="WHERE NOT entity:EntityToExclude", + # another example: in some cases, we do not want to merge + # entities whose name is John Doe because we don't know if it + # corresponds to the same real person + # filter_query="WHERE entity.name <> 'John Doe'", + # optionally, change the property used for resolution (default is "name") + # resolve_property="name", + # and the neo4j database where data is updated + # neo4j_database="neo4j", + ) + res: ResolutionStats = await resolver.run() + print(res) diff --git a/examples/pipeline/schema.py b/examples/customize/build_graph/components/schema_builders/schema.py similarity index 100% rename from examples/pipeline/schema.py rename to examples/customize/build_graph/components/schema_builders/schema.py diff --git a/examples/customize/build_graph/components/splitters/custom_splitter.py b/examples/customize/build_graph/components/splitters/custom_splitter.py new file mode 100644 index 00000000..f28edbc1 --- /dev/null +++ b/examples/customize/build_graph/components/splitters/custom_splitter.py @@ -0,0 +1,14 @@ +from neo4j_graphrag.experimental.components.text_splitters.base import TextSplitter +from neo4j_graphrag.experimental.components.types import TextChunk, TextChunks + + +class MySplitter(TextSplitter): + async def run(self, text: str) -> TextChunks: + # your logic here + return TextChunks( + chunks=[ + TextChunk(text="", index=0), + # optional metadata + TextChunk(text="", index=1, metadata={"key": "value"}), + ] + ) diff --git a/examples/customize/build_graph/components/splitters/fixed_size_splitter.py b/examples/customize/build_graph/components/splitters/fixed_size_splitter.py new file mode 100644 index 00000000..8b2f2cc1 --- /dev/null +++ b/examples/customize/build_graph/components/splitters/fixed_size_splitter.py @@ -0,0 +1,14 @@ +from neo4j_graphrag.experimental.components.text_splitters.fixed_size_splitter import ( + FixedSizeSplitter, +) +from neo4j_graphrag.experimental.components.types import TextChunks + + +async def main() -> TextChunks: + splitter = FixedSizeSplitter( + # optionally, configure chunk_size and chunk_overlap + # chunk_size=4000, + # chunk_overlap=200, + ) + chunks = await splitter.run(text="text to split") + return chunks diff --git a/examples/__init__.py b/examples/customize/build_graph/components/splitters/langhchain_splitter.py similarity index 100% rename from examples/__init__.py rename to examples/customize/build_graph/components/splitters/langhchain_splitter.py diff --git a/examples/pinecone/__init__.py b/examples/customize/build_graph/components/splitters/llamaindex_splitter.py similarity index 100% rename from examples/pinecone/__init__.py rename to examples/customize/build_graph/components/splitters/llamaindex_splitter.py diff --git a/examples/customize/build_graph/components/writers/custom_writer.py b/examples/customize/build_graph/components/writers/custom_writer.py new file mode 100644 index 00000000..a66ccaec --- /dev/null +++ b/examples/customize/build_graph/components/writers/custom_writer.py @@ -0,0 +1,21 @@ +"""Implement a custom writer to save the results, for instance by using 
+custom Cypher queries. +""" + +import neo4j +from neo4j_graphrag.experimental.components.kg_writer import KGWriter, KGWriterModel +from neo4j_graphrag.experimental.components.types import Neo4jGraph +from pydantic import validate_call + + +class MyWriter(KGWriter): + def __init__(self, driver: neo4j.Driver) -> None: + self.driver = driver + + @validate_call + async def run(self, graph: Neo4jGraph) -> KGWriterModel: + try: + self.driver.execute_query("my query") + return KGWriterModel(status="SUCCESS") + except Exception: + return KGWriterModel(status="FAILURE") diff --git a/examples/customize/build_graph/components/writers/neo4j_writer.py b/examples/customize/build_graph/components/writers/neo4j_writer.py new file mode 100644 index 00000000..f85acc20 --- /dev/null +++ b/examples/customize/build_graph/components/writers/neo4j_writer.py @@ -0,0 +1,20 @@ +import neo4j +from neo4j_graphrag.experimental.components.kg_writer import ( + KGWriterModel, + Neo4jWriter, +) +from neo4j_graphrag.experimental.components.types import Neo4jGraph + + +async def main(driver: neo4j.Driver, graph: Neo4jGraph) -> KGWriterModel: + writer = Neo4jWriter( + driver, + # optionally, configure the neo4j database + # neo4j_database="neo4j", + # you can tune batch_size and max_concurrency to + # improve speed + # batch_size=1000, + # max_concurrency=5, + ) + result = await writer.run(graph=graph) + return result diff --git a/examples/pipeline/kg_builder_from_pdf.py b/examples/customize/build_graph/pipeline/kg_builder_from_pdf.py similarity index 100% rename from examples/pipeline/kg_builder_from_pdf.py rename to examples/customize/build_graph/pipeline/kg_builder_from_pdf.py diff --git a/examples/pipeline/kg_builder_from_text.py b/examples/customize/build_graph/pipeline/kg_builder_from_text.py similarity index 100% rename from examples/pipeline/kg_builder_from_text.py rename to examples/customize/build_graph/pipeline/kg_builder_from_text.py diff --git a/examples/pipeline/visualization.py b/examples/customize/build_graph/pipeline/visualization.py similarity index 100% rename from examples/pipeline/visualization.py rename to examples/customize/build_graph/pipeline/visualization.py diff --git a/examples/customize/embeddings/azure_openai_embeddings.py b/examples/customize/embeddings/azure_openai_embeddings.py new file mode 100644 index 00000000..62fe3ac8 --- /dev/null +++ b/examples/customize/embeddings/azure_openai_embeddings.py @@ -0,0 +1,14 @@ +"""This example demonstrate how to embed a text into a vector +using OpenAI models and API. +""" + +from neo4j_graphrag.embeddings import AzureOpenAIEmbeddings + +embeder = AzureOpenAIEmbeddings( + model="text-embedding-ada-002", + azure_endpoint="https://my-endpoint.openai.azure.com/", + api_key="", + api_version="", +) +res = embeder.embed_query("my question") +print(res[:10]) diff --git a/examples/customize/embeddings/cohere_embeddings.py b/examples/customize/embeddings/cohere_embeddings.py new file mode 100644 index 00000000..92a4ca3d --- /dev/null +++ b/examples/customize/embeddings/cohere_embeddings.py @@ -0,0 +1,12 @@ +from neo4j_graphrag.embeddings import CohereEmbeddings + +# set api key here on in the CO_API_KEY env var +api_key = None +# api_key = "sk-..." 
+ +embeder = CohereEmbeddings( + model="embed-english-v3.0", + api_key=api_key, +) +res = embeder.embed_query("my question") +print(res[:10]) diff --git a/examples/customize/embeddings/custom_embeddings.py b/examples/customize/embeddings/custom_embeddings.py new file mode 100644 index 00000000..5b15eb0f --- /dev/null +++ b/examples/customize/embeddings/custom_embeddings.py @@ -0,0 +1,17 @@ +import random +from typing import Any + +from neo4j_graphrag.embeddings import Embedder + + +class CustomEmbeddings(Embedder): + def __init__(self, dimension: int = 10, **kwargs: Any): + self.dimension = dimension + + def embed_query(self, input: str) -> list[float]: + return [random.random() for _ in range(self.dimension)] + + +embedder = CustomEmbeddings(dimension=1024) +res = embedder.embed_query("text") +print(res[:10]) diff --git a/examples/customize/embeddings/mistalai_embeddings.py b/examples/customize/embeddings/mistalai_embeddings.py new file mode 100644 index 00000000..d26c6cce --- /dev/null +++ b/examples/customize/embeddings/mistalai_embeddings.py @@ -0,0 +1,13 @@ +"""This example demonstrates how to embed a text into a vector +using MistralAI models and API. +""" + +from neo4j_graphrag.embeddings import MistralAIEmbeddings + +# set api key here or in the MISTRAL_API_KEY env var +api_key = None +# api_key = "sk-..." + +embeder = MistralAIEmbeddings(model="mistral-embed", api_key=api_key) +res = embeder.embed_query("my question") +print(res[:10]) diff --git a/examples/customize/embeddings/ollama_embeddings.py b/examples/customize/embeddings/ollama_embeddings.py new file mode 100644 index 00000000..2a5a0046 --- /dev/null +++ b/examples/customize/embeddings/ollama_embeddings.py @@ -0,0 +1,16 @@ +"""This example demonstrates how to embed a text into a vector +using a local Ollama model through its OpenAI-compatible API. +""" + +from neo4j_graphrag.embeddings import OpenAIEmbeddings + +# not used but needs to be provided +api_key = "ollama" + +embeder = OpenAIEmbeddings( + base_url="http://localhost:11434/v1", + api_key=api_key, + model="", +) +res = embeder.embed_query("my question") +print(res[:10]) diff --git a/examples/customize/embeddings/openai_embeddings.py b/examples/customize/embeddings/openai_embeddings.py new file mode 100644 index 00000000..c1d9b57a --- /dev/null +++ b/examples/customize/embeddings/openai_embeddings.py @@ -0,0 +1,13 @@ +"""This example demonstrates how to embed a text into a vector +using OpenAI models and API. +""" + +from neo4j_graphrag.embeddings import OpenAIEmbeddings + +# set api key here or in the OPENAI_API_KEY env var +api_key = None +# api_key = "sk-..." + +embeder = OpenAIEmbeddings(model="text-embedding-ada-002", api_key=api_key) +res = embeder.embed_query("my question") +print(res[:10]) diff --git a/examples/customize/embeddings/vertexai_embeddings.py b/examples/customize/embeddings/vertexai_embeddings.py new file mode 100644 index 00000000..ff6e48ed --- /dev/null +++ b/examples/customize/embeddings/vertexai_embeddings.py @@ -0,0 +1,9 @@ +"""This example demonstrates how to embed a text into a vector +using Google models and the VertexAI API.
+""" + +from neo4j_graphrag.embeddings import VertexAIEmbeddings + +embeder = VertexAIEmbeddings(model="text-embedding-004") +res = embeder.embed_query("my question") +print(res[:10]) diff --git a/examples/customize/llms/anthropic_llm.py b/examples/customize/llms/anthropic_llm.py new file mode 100644 index 00000000..d84266a3 --- /dev/null +++ b/examples/customize/llms/anthropic_llm.py @@ -0,0 +1,14 @@ +from neo4j_graphrag.llm import AnthropicLLM, LLMResponse + +# set api key here on in the ANTHROPIC_API_KEY env var +api_key = None +# api_key = "sk-..." + + +llm = AnthropicLLM( + model_name="claude-3-opus-20240229", + model_params={"max_tokens": 1000}, # max_tokens must be specified + api_key=api_key, +) +res: LLMResponse = llm.invoke("say something") +print(res.content) diff --git a/examples/customize/llms/cohere_llm.py b/examples/customize/llms/cohere_llm.py new file mode 100644 index 00000000..7dfd8d6e --- /dev/null +++ b/examples/customize/llms/cohere_llm.py @@ -0,0 +1,12 @@ +from neo4j_graphrag.llm import CohereLLM, LLMResponse + +# set api key here on in the CO_API_KEY env var +api_key = None +# api_key = "sk-..." + +llm = CohereLLM( + model_name="command-r", + api_key=api_key, +) +res: LLMResponse = llm.invoke("say something") +print(res.content) diff --git a/examples/customize/llms/custom_llm.py b/examples/customize/llms/custom_llm.py new file mode 100644 index 00000000..e035048c --- /dev/null +++ b/examples/customize/llms/custom_llm.py @@ -0,0 +1,24 @@ +import random +import string +from typing import Any + +from neo4j_graphrag.llm import LLMInterface, LLMResponse + + +class CustomLLM(LLMInterface): + def __init__(self, model_name: str, **kwargs: Any): + super().__init__(model_name, **kwargs) + + def invoke(self, input: str) -> LLMResponse: + content: str = ( + self.model_name + ": " + "".join(random.choices(string.ascii_letters, k=30)) + ) + return LLMResponse(content=content) + + async def ainvoke(self, input: str) -> LLMResponse: + raise NotImplementedError() + + +llm = CustomLLM("") +res: LLMResponse = llm.invoke("text") +print(res.content) diff --git a/examples/customize/llms/mistalai_llm.py b/examples/customize/llms/mistalai_llm.py new file mode 100644 index 00000000..7aaa8e8d --- /dev/null +++ b/examples/customize/llms/mistalai_llm.py @@ -0,0 +1,12 @@ +from neo4j_graphrag.llm import MistralAILLM + +# set api key here on in the MISTRAL_API_KEY env var +api_key = None +# api_key = "sk-..." + + +llm = MistralAILLM( + model_name="mistral-small-latest", + api_key=api_key, +) +llm.invoke("say something") diff --git a/examples/customize/llms/openai_llm.py b/examples/customize/llms/openai_llm.py new file mode 100644 index 00000000..89ea44a4 --- /dev/null +++ b/examples/customize/llms/openai_llm.py @@ -0,0 +1,9 @@ +from neo4j_graphrag.llm import LLMResponse, OpenAILLM + +# set api key here on in the OPENAI_API_KEY env var +api_key = None +# api_key = "sk-..." 
+ +llm = OpenAILLM(model_name="gpt-4o", api_key=api_key) +res: LLMResponse = llm.invoke("say something") +print(res.content) diff --git a/examples/customize/llms/vertexai_llm.py b/examples/customize/llms/vertexai_llm.py new file mode 100644 index 00000000..d89e780a --- /dev/null +++ b/examples/customize/llms/vertexai_llm.py @@ -0,0 +1,12 @@ +from neo4j_graphrag.llm import LLMResponse, VertexAILLM +from vertexai.generative_models import GenerationConfig + +generation_config = GenerationConfig(temperature=0.0) +llm = VertexAILLM( + model_name="gemini-1.5-flash-001", + generation_config=generation_config, + # add here any argument that will be passed to the + # vertexai.generative_models.GenerativeModel client +) +res: LLMResponse = llm.invoke("say something") +print(res.content) diff --git a/examples/pinecone/README.md b/examples/customize/retrievers/external/pinecone/README.md similarity index 100% rename from examples/pinecone/README.md rename to examples/customize/retrievers/external/pinecone/README.md diff --git a/examples/pinecone/vector_search.py b/examples/customize/retrievers/external/pinecone/pinecone_text_search.py similarity index 50% rename from examples/pinecone/vector_search.py rename to examples/customize/retrievers/external/pinecone/pinecone_text_search.py index c5779f25..48dcd49f 100644 --- a/examples/pinecone/vector_search.py +++ b/examples/customize/retrievers/external/pinecone/pinecone_text_search.py @@ -1,9 +1,19 @@ +"""This example demonstrates how to use PineconeNeo4jRetriever, ie vectors are +stored in the Pinecone database. + +See the [README](./README.md) for more +information about how spin up a Pinecone and Neo4j databases if needed. + +In this example, search is performed from a text. Embeddings are computed +using OpenAI models. See [../../embeddings/](../../embeddings/) for examples +using other supported embedders. +""" + from neo4j import GraphDatabase +from neo4j_graphrag.embeddings import OpenAIEmbeddings from neo4j_graphrag.retrievers import PineconeNeo4jRetriever from pinecone import Pinecone -from examples.embedding_biology import EMBEDDING_BIOLOGY - NEO4J_AUTH = ("neo4j", "password") NEO4J_URL = "neo4j://localhost:7687" PC_API_KEY = "API_KEY" @@ -12,14 +22,17 @@ def main() -> None: with GraphDatabase.driver(NEO4J_URL, auth=NEO4J_AUTH) as neo4j_driver: pc_client = Pinecone(PC_API_KEY) + embedder = OpenAIEmbeddings() + retriever = PineconeNeo4jRetriever( driver=neo4j_driver, client=pc_client, index_name="jeopardy", id_property_neo4j="id", + embedder=embedder, ) - res = retriever.search(query_vector=EMBEDDING_BIOLOGY, top_k=2) + res = retriever.search(query_text="biology", top_k=2) print(res) diff --git a/examples/pinecone/text_search.py b/examples/customize/retrievers/external/pinecone/pinecone_vector_search.py similarity index 74% rename from examples/pinecone/text_search.py rename to examples/customize/retrievers/external/pinecone/pinecone_vector_search.py index 606e361e..0f646213 100644 --- a/examples/pinecone/text_search.py +++ b/examples/customize/retrievers/external/pinecone/pinecone_vector_search.py @@ -1,3 +1,12 @@ +"""This example demonstrates how to use PineconeNeo4jRetriever, ie vectors are +stored in the Pinecone database. + +See the [README](./README.md) for more +information about how spin up a Pinecone and Neo4j databases if needed. + +In this example, search is performed from an already computed vector. 
+""" + from neo4j import GraphDatabase from neo4j_graphrag.embeddings.sentence_transformers import ( SentenceTransformerEmbeddings, diff --git a/examples/qdrant/README.md b/examples/customize/retrievers/external/qdrant/README.md similarity index 100% rename from examples/qdrant/README.md rename to examples/customize/retrievers/external/qdrant/README.md diff --git a/examples/qdrant/text_search.py b/examples/customize/retrievers/external/qdrant/qdrant_text_search.py similarity index 100% rename from examples/qdrant/text_search.py rename to examples/customize/retrievers/external/qdrant/qdrant_text_search.py diff --git a/examples/qdrant/vector_search.py b/examples/customize/retrievers/external/qdrant/qdrant_vector_search.py similarity index 92% rename from examples/qdrant/vector_search.py rename to examples/customize/retrievers/external/qdrant/qdrant_vector_search.py index f57d4bb6..45e00fc6 100644 --- a/examples/qdrant/vector_search.py +++ b/examples/customize/retrievers/external/qdrant/qdrant_vector_search.py @@ -1,9 +1,8 @@ +from embedding_biology import EMBEDDING_BIOLOGY from neo4j import GraphDatabase from neo4j_graphrag.retrievers import QdrantNeo4jRetriever from qdrant_client import QdrantClient -from examples.embedding_biology import EMBEDDING_BIOLOGY - NEO4J_URL = "neo4j://localhost:7687" NEO4J_AUTH = ("neo4j", "password") diff --git a/examples/weaviate/README.md b/examples/customize/retrievers/external/weaviate/README.md similarity index 100% rename from examples/weaviate/README.md rename to examples/customize/retrievers/external/weaviate/README.md diff --git a/examples/weaviate/text_search_local_embedder.py b/examples/customize/retrievers/external/weaviate/weaviate_text_search_local_embedder.py similarity index 74% rename from examples/weaviate/text_search_local_embedder.py rename to examples/customize/retrievers/external/weaviate/weaviate_text_search_local_embedder.py index 5767ad23..532f6bcd 100644 --- a/examples/weaviate/text_search_local_embedder.py +++ b/examples/customize/retrievers/external/weaviate/weaviate_text_search_local_embedder.py @@ -1,3 +1,13 @@ +"""This example demonstrates how to use WeaviateNeo4jRetriever, ie vectors are +stored in the Weaviate database. + +See the [README](./README.md) for more +information about how spin up a Weaviate and Neo4j databases if needed. + +In this example, we are embeddings a text and provide a local embeder to the +WeaviateNeo4jRetriever. +""" + from neo4j import GraphDatabase from neo4j_graphrag.embeddings.sentence_transformers import ( SentenceTransformerEmbeddings, diff --git a/examples/weaviate/text_search_remote_embedder.py b/examples/customize/retrievers/external/weaviate/weaviate_text_search_remote_embedder.py similarity index 72% rename from examples/weaviate/text_search_remote_embedder.py rename to examples/customize/retrievers/external/weaviate/weaviate_text_search_remote_embedder.py index e54debe3..ee789a6a 100644 --- a/examples/weaviate/text_search_remote_embedder.py +++ b/examples/customize/retrievers/external/weaviate/weaviate_text_search_remote_embedder.py @@ -1,3 +1,13 @@ +"""This example demonstrates how to use WeaviateNeo4jRetriever, ie vectors are +stored in the Weaviate database. + +See the [README](./README.md) for more +information about how spin up a Weaviate and Neo4j databases if needed. + +In this example, we are embeddings a text and provide a remote embeder to the +WeaviateNeo4jRetriever. 
+""" + from neo4j import GraphDatabase from neo4j_graphrag.retrievers import WeaviateNeo4jRetriever from weaviate.connect.helpers import connect_to_local diff --git a/examples/weaviate/vector_search.py b/examples/customize/retrievers/external/weaviate/weaviate_vector_search.py similarity index 68% rename from examples/weaviate/vector_search.py rename to examples/customize/retrievers/external/weaviate/weaviate_vector_search.py index 6272df0d..dd1f08c2 100644 --- a/examples/weaviate/vector_search.py +++ b/examples/customize/retrievers/external/weaviate/weaviate_vector_search.py @@ -1,9 +1,17 @@ +"""This example demonstrates how to use WeaviateNeo4jRetriever, ie vectors are +stored in the Weaviate database. + +See the [README](./README.md) for more +information about how spin up a Weaviate and Neo4j databases if needed. + +In this example, search is performed from an already existing vector. +""" + +from embedding_biology import EMBEDDING_BIOLOGY from neo4j import GraphDatabase from neo4j_graphrag.retrievers import WeaviateNeo4jRetriever from weaviate.connect.helpers import connect_to_local -from examples.embedding_biology import EMBEDDING_BIOLOGY - NEO4J_URL = "neo4j://localhost:7687" NEO4J_AUTH = ("neo4j", "password") diff --git a/examples/hybrid_cypher_search.py b/examples/customize/retrievers/hybrid_retrievers/hybrid_cypher_search.py similarity index 100% rename from examples/hybrid_cypher_search.py rename to examples/customize/retrievers/hybrid_retrievers/hybrid_cypher_search.py diff --git a/examples/hybrid_search.py b/examples/customize/retrievers/hybrid_retrievers/hybrid_search.py similarity index 100% rename from examples/hybrid_search.py rename to examples/customize/retrievers/hybrid_retrievers/hybrid_search.py diff --git a/examples/customize/retrievers/result_formatter_vector_cypher_retriever.py b/examples/customize/retrievers/result_formatter_vector_cypher_retriever.py new file mode 100644 index 00000000..2a8f520d --- /dev/null +++ b/examples/customize/retrievers/result_formatter_vector_cypher_retriever.py @@ -0,0 +1,57 @@ +"""This example uses an example Movie database where movies' plots are embedded +using OpenAI embeddings. OPENAI_API_KEY needs to be set in the environment for +this example to run. + +Also requires minimal Cypher knowledge to write the retrieval query. + +It shows how to use a vector-cypher retriever to find context +similar to a query **text** using vector similarity + graph traversal. 
+""" + +import neo4j +from neo4j_graphrag.embeddings.openai import OpenAIEmbeddings +from neo4j_graphrag.retrievers import VectorCypherRetriever +from neo4j_graphrag.types import RetrieverResultItem + +# Define database credentials +URI = "neo4j+s://demo.neo4jlabs.com" +AUTH = ("recommendations", "recommendations") +DATABASE = "recommendations" +INDEX_NAME = "moviePlotsEmbedding" + +# for each Movie node matched by the vector search, retrieve more context: +# the name of all actors starring in that movie +RETRIEVAL_QUERY = " MATCH (node)<-[:ACTED_IN]-(p:Person) RETURN node.title as movieTitle, node.plot as moviePlot, collect(p.name) as actors, score as similarityScore" + + +def my_result_formatter(record: neo4j.Record) -> RetrieverResultItem: + """The record is a row output from the RETRIEVAL_QUERY so it our case it contains + the following keys: + - movieTitle + - moviePlot + - actors + - similarityScore + """ + return RetrieverResultItem( + content=f"Movie title: {record.get('movieTitle')}, Plot: {record.get('moviePlot')}, Actors: {record.get('actors')}", + metadata={"score": record.get("similarityScore")}, + ) + + +with neo4j.GraphDatabase.driver(URI, auth=AUTH, database=DATABASE) as driver: + # Initialize the retriever + retriever = VectorCypherRetriever( + driver=driver, + index_name=INDEX_NAME, + # note: embedder is optional if you only use query_vector + embedder=OpenAIEmbeddings(), + retrieval_query=RETRIEVAL_QUERY, + result_formatter=my_result_formatter, + # optionally, set neo4j database + # neo4j_database="neo4j", + ) + + # Perform the similarity search for a text query + # (retrieve the top 5 most similar nodes) + query_text = "Who were the actors in Avatar?" + print(retriever.search(query_text=query_text, top_k=5)) diff --git a/examples/customize/retrievers/result_formatter_vector_retriever.py b/examples/customize/retrievers/result_formatter_vector_retriever.py new file mode 100644 index 00000000..e4c7448f --- /dev/null +++ b/examples/customize/retrievers/result_formatter_vector_retriever.py @@ -0,0 +1,151 @@ +"""This example demonstrates how to customize the retriever +results format. + +Retriever.get_search_result returns a RawSearchResult object that consists +in a list of neo4j.Records and an optional metadata dictionary. The +Retriever.search method returns a RetrieverResult object where each neo4j.Record +has been replaced by a RetrieverResultItem, ie a content and metadata dictionary. +The content is what will be used to augment the prompt. By default, this content +is a stringified representation of the neo4j.Record. There are multiple ways the +user can act on this format: +- Use the `return_properties` parameter +- And/or use the result_formatter parameter + +Let's consider the movie database where the movies' plot have been embedded. Movie +nodes have additional properties such as title and are connected to Actor nodes that +have a name property: + +(:Movie {embedding: [], title: "", plot: "", year: "", budget: 1000, ....}) + <-[:ACTED_IN]- + (:Actor {name: ...}) + +NB: to run this example OPENAI_API_KEY needs to be in the env vars. 
+To use another embedder, see the corresponding examples in ../customize/embeddings
+"""
+
+import neo4j
+from neo4j_graphrag.embeddings.openai import OpenAIEmbeddings
+from neo4j_graphrag.retrievers import VectorRetriever
+from neo4j_graphrag.types import RetrieverResultItem
+
+URI = "neo4j+s://demo.neo4jlabs.com"
+AUTH = ("recommendations", "recommendations")
+DATABASE = "recommendations"
+INDEX_NAME = "moviePlotsEmbedding"
+
+
+# Connect to Neo4j database
+driver = neo4j.GraphDatabase.driver(URI, auth=AUTH, database=DATABASE)
+
+
+query_text = "Find a movie about astronauts"
+top_k_results = 1
+
+
+"""First, let's select the properties we want to return
+with the return_properties parameter:
+"""
+print("=" * 50)
+print("RETURN PROPERTIES")
+retriever = VectorRetriever(
+    driver=driver,
+    index_name=INDEX_NAME,
+    embedder=OpenAIEmbeddings(),
+    return_properties=["title", "plot"],
+)
+print(retriever.search(query_text=query_text, top_k=top_k_results))
+print()
+"""
+OUTPUT:
+RetrieverResult(
+    items=[
+        RetrieverResultItem(
+            content="{'title': 'For All Mankind', 'plot': 'This movie documents the Apollo missions perhaps the most definitively of any movie under two hours. Al Reinert watched all the footage shot during the missions--over 6,000,000 feet of it, ...'}",
+            metadata={'score': 0.9354040622711182, 'nodeLabels': None, 'id': None}
+        )
+    ]
+    metadata={'__retriever': 'VectorRetriever'}
+)
+"""
+
+
+"""In a second example, we'll use the result_formatter parameter to format the
+results in detail with a custom function:
+"""
+print("=" * 50)
+print("RESULT FORMATTER")
+
+
+def my_result_formatter(record: neo4j.Record) -> RetrieverResultItem:
+    """
+    If 'return_properties' is not specified, vector retrievers will return a record with keys:
+    - `node`: a dict representation of all node properties (except embedding if we can identify it)
+    - `id`: the node element ID
+    - `nodeLabels`: the labels attached to the node
+    - `score`: the score returned by the vector index search that tells us how close the node vector is to the query vector
+
+    In the case of movies, we may want to keep only the title and plot in the content
+    (this is what is passed to the LLM afterward) and keep the score in the metadata.
+    This can be achieved with this function:
+    """
+    node = record.get("node")
+    return RetrieverResultItem(
+        content=f"{node.get('title')}: {node.get('plot')}",
+        metadata={"score": record.get("score")},
+    )
+
+
+retriever = VectorRetriever(
+    driver=driver,
+    index_name=INDEX_NAME,
+    embedder=OpenAIEmbeddings(),
+    result_formatter=my_result_formatter,
+)
+
+query_text = "Find a movie about astronauts"
+print(retriever.search(query_text=query_text, top_k=top_k_results))
+print()
+"""
+OUTPUT:
+RetrieverResult(
+    items=[
+        RetrieverResultItem(
+            content='For All Mankind: This movie documents the Apollo missions perhaps the most definitively of any movie under two hours.
Al Reinert watched all the footage shot during the missions--over 6,000,000 feet of it, ...', + metadata={'score': 0.9354040622711182} + ) + ] + metadata={'__retriever': 'VectorRetriever'} +) +""" + + +"""We can mix both return_properties and result_formatter: +""" +print("=" * 50) +print("RETURN PROPERTIES + RESULT FOMATTER") + +retriever = VectorRetriever( + driver=driver, + index_name=INDEX_NAME, + embedder=OpenAIEmbeddings(), + return_properties=["title", "plot"], + result_formatter=my_result_formatter, +) + +query_text = "Find a movie about astronauts" +print(retriever.search(query_text=query_text, top_k=top_k_results)) +print() + +""" +OUTPUT: +RetrieverResult( + items=[ + RetrieverResultItem( + content='For All Mankind: This movie documents the Apollo missions perhaps the most definitively of any movie under two hours. Al Reinert watched all the footage shot during the missions--over 6,000,000 feet of it, ...', + metadata={'score': 0.9354040622711182}) + ] + metadata={'__retriever': 'VectorRetriever'} +) +""" + +driver.close() diff --git a/examples/customize/retrievers/use_pre_filters.py b/examples/customize/retrievers/use_pre_filters.py new file mode 100644 index 00000000..cc8c5e2f --- /dev/null +++ b/examples/customize/retrievers/use_pre_filters.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +import neo4j +from neo4j_graphrag.embeddings import OpenAIEmbeddings +from neo4j_graphrag.retrievers import VectorRetriever + +URI = "neo4j://localhost:7687" +AUTH = ("neo4j", "password") + +INDEX_NAME = "embedding-name" +DIMENSION = 1536 + +# Connect to Neo4j database +driver = neo4j.GraphDatabase.driver(URI, auth=AUTH) + + +# Initialize the retriever +retriever = VectorRetriever(driver, INDEX_NAME, embedder=OpenAIEmbeddings()) + +# Perform the search +query_text = "Find me a book about Fremen" +pre_filters = {"int_property": {"$gt": 100}} +print( + retriever.search( + query_text=query_text, + top_k=1, + filters=pre_filters, + ) +) diff --git a/examples/pipeline/Harry Potter and the Chamber of Secrets Summary.pdf b/examples/data/Harry Potter and the Chamber of Secrets Summary.pdf similarity index 100% rename from examples/pipeline/Harry Potter and the Chamber of Secrets Summary.pdf rename to examples/data/Harry Potter and the Chamber of Secrets Summary.pdf diff --git a/examples/pipeline/Harry Potter and the Death Hallows Summary.pdf b/examples/data/Harry Potter and the Death Hallows Summary.pdf similarity index 100% rename from examples/pipeline/Harry Potter and the Death Hallows Summary.pdf rename to examples/data/Harry Potter and the Death Hallows Summary.pdf diff --git a/examples/data/embedding_avatar.py b/examples/data/embedding_avatar.py new file mode 100644 index 00000000..b645a0ad --- /dev/null +++ b/examples/data/embedding_avatar.py @@ -0,0 +1,1539 @@ +# Avatar's plot +EMBEDDINGS_AVATAR = [ + 0.007940251380205154, + -0.03954245150089264, + -0.00444323243573308, + -0.03350786119699478, + 0.005005666520446539, + 0.01764059066772461, + -0.04237447306513786, + -0.027261529117822647, + -0.01103694923222065, + -0.0013018703320994973, + 0.024217765778303146, + 0.021253405138850212, + 0.017852332442998886, + -0.02555437572300434, + 0.005981655791401863, + -0.006266181822866201, + 0.019691823050379753, + -0.007311648223549128, + 0.003781544743105769, + -0.008727659471333027, + -0.00870119221508503, + 0.0036061974242329597, + -0.00009568622772349045, + 0.0004549102159217, + 0.00556148448958993, + -0.0022679343819618225, + 0.017587656155228615, + -0.027976151555776596, + 
0.03528118133544922, + -0.031840406358242035, + 0.015430554747581482, + -0.014133647084236145, + -0.028452567756175995, + -0.02309289760887623, + -0.021068133413791656, + -0.0037087590899318457, + 0.012175051495432854, + -0.029537735506892204, + 0.00821154285222292, + -0.027843814343214035, + 0.00032650146749801934, + -0.0015053392853587866, + 0.008343880996108055, + -0.007007271517068148, + -0.027685008943080902, + 0.0023225233890116215, + -0.004866712260991335, + -0.028240827843546867, + -0.024482442066073418, + 0.004668205976486206, + 0.009746658615767956, + -0.0009023764869198203, + -0.01651572249829769, + -0.012466194108128548, + -0.00410577142611146, + -0.007907167077064514, + -0.018593421205878258, + 0.009528301656246185, + 0.009369496256113052, + -0.001698882901109755, + 0.014954139478504658, + 0.009839294478297234, + -0.007602790370583534, + -0.003960200119763613, + -0.000961101264692843, + 0.008694575168192387, + -0.01389543991535902, + -0.009336411952972412, + 0.005217406898736954, + -0.00626287329941988, + 0.04758857190608978, + 0.01848755218088627, + 0.017719993367791176, + -0.004244726151227951, + 0.03094051219522953, + -0.015007074922323227, + -0.006216554902493954, + -0.0006955990684218705, + -0.024151597172021866, + 0.025210298597812653, + 0.02085639350116253, + -0.03604874014854431, + -0.009091587737202644, + 0.019069837406277657, + 0.016290748491883278, + -0.014054244384169579, + 0.005197556223720312, + -0.005346436053514481, + -0.01638338528573513, + -0.005362978205084801, + 0.007013888563960791, + 0.004019752144813538, + 0.010348794050514698, + 0.009898846969008446, + -0.005716980900615454, + 0.022365041077136993, + -0.005932029336690903, + 0.02037997916340828, + -0.0018825011793524027, + -0.057434484362602234, + -0.007119758520275354, + 0.0246412456035614, + -0.010673020966351032, + -0.019572719931602478, + -0.013134499080479145, + 0.006047824863344431, + 0.012082415632903576, + 0.016700996086001396, + 0.0481443889439106, + 0.01028924249112606, + -0.018712526187300682, + 0.021147536113858223, + 0.009779742918908596, + -0.034275416284799576, + -0.010864910669624805, + -0.02409866265952587, + 0.005412604659795761, + 0.0035995806101709604, + -0.001502030878327787, + -0.007979952730238438, + 0.030596435070037842, + 0.015126178972423077, + -0.011956695467233658, + -0.01044143084436655, + 0.0010496019385755062, + 0.011195754632353783, + -0.012664700858294964, + -0.019890328869223595, + 0.0011521634878590703, + -0.004598728846758604, + 0.017018605023622513, + 0.015629060566425323, + 0.009488600306212902, + 0.00036454852670431137, + -0.01750825345516205, + 0.027685008943080902, + -0.02133280783891678, + 0.016436319798231125, + -0.0058294679038226604, + -0.0043175118044018745, + 0.00895263347774744, + 0.004588803742080927, + -0.010931079275906086, + -0.006772372871637344, + -0.009336411952972412, + -0.00024958030553534627, + 0.019109537824988365, + 0.0024515525437891483, + 0.015893736854195595, + 0.0064216782338917255, + 0.01716417632997036, + 0.003285279031842947, + 0.01426598522812128, + -0.020869627594947815, + 0.001879192772321403, + -0.03104638308286667, + -0.011883909814059734, + 0.016039308160543442, + 0.023688416928052902, + -0.013683700002729893, + 0.0018907723715528846, + 0.0016707611503079534, + -0.006861700676381588, + -0.03213154897093773, + 0.021822458133101463, + 0.02413836307823658, + 0.0002526819589547813, + -0.018738992512226105, + -0.008065971545875072, + -0.006679736543446779, + 0.002196802757680416, + 0.041315775364637375, + -0.003010678570717573, + 
0.02116077020764351, + -0.014583595097064972, + 0.0072123948484659195, + -0.00447631673887372, + 0.00037798905395902693, + -0.02352961152791977, + 0.0011819395003840327, + -0.03194627910852432, + -0.008476218208670616, + 0.008893080987036228, + 0.014464491046965122, + -0.020406445488333702, + -0.008112289942800999, + 0.014186582528054714, + -0.006170236971229315, + 0.010792125016450882, + -0.000030499662898364477, + 0.010084118694067001, + -0.00024254986783489585, + -0.018024370074272156, + 0.004032985772937536, + -0.6665576100349426, + -0.02704978920519352, + 0.04049528017640114, + -0.005283575505018234, + 0.007960101589560509, + 0.00833064690232277, + -0.0015152646228671074, + -0.0043175118044018745, + 0.0006356336525641382, + 0.015404087491333485, + -0.013213901780545712, + 0.02937893010675907, + -0.00772851100191474, + -0.014477725140750408, + -0.003801395185291767, + -0.0077880630269646645, + 0.0030239124316722155, + -0.019149240106344223, + 0.02518383041024208, + -0.02094903029501438, + -0.029299527406692505, + 0.032237421721220016, + -0.004638430196791887, + -0.02116077020764351, + -0.00312316557392478, + 0.0003947380173485726, + 0.03869549185037613, + -0.0042877355590462685, + -0.024495674297213554, + 0.020088836550712585, + -0.01712447591125965, + 0.02961713634431362, + -0.0018841554410755634, + 0.01875222660601139, + 0.05068527162075043, + -0.016502488404512405, + -0.02135927602648735, + 0.03628695011138916, + 0.0002915560908149928, + 0.014464491046965122, + -0.017865564674139023, + 0.008324029855430126, + 0.022417975589632988, + -0.014054244384169579, + -0.006391902454197407, + -0.011652318760752678, + 0.02640133537352085, + -0.016039308160543442, + 0.008807062171399593, + 0.017918501049280167, + 0.012816889211535454, + 0.0032075305934995413, + 0.01504677627235651, + 0.03250209614634514, + 0.008198309689760208, + -0.017547955736517906, + 0.02776441164314747, + 0.015549658797681332, + 0.01831551268696785, + -0.005895636510103941, + -0.003500327467918396, + 0.030596435070037842, + -0.010838443413376808, + -0.0028684157878160477, + 0.0026368252001702785, + 0.03117872029542923, + -0.018090538680553436, + 0.009038652293384075, + -0.006881550885736942, + -0.0412893071770668, + 0.0387219600379467, + 0.010957546532154083, + -0.008013037033379078, + 0.004658280871808529, + 0.0031860258895903826, + 0.023436974734067917, + 0.03660455718636513, + -0.021690119057893753, + -0.011639084666967392, + 0.00913790613412857, + 0.027976151555776596, + -0.014702698215842247, + -0.010811975225806236, + 0.005402679089456797, + 0.020869627594947815, + -0.0069278692826628685, + -0.021623950451612473, + 0.0208828616887331, + 0.03652515634894371, + -0.0057533737272024155, + 0.004641738720238209, + 0.01827581226825714, + -0.01970505714416504, + -0.01888456381857395, + -0.016899501904845238, + 0.006378668360412121, + -0.013333004899322987, + -0.008635023608803749, + 0.01942714862525463, + -0.04075995832681656, + 0.00037033826811239123, + -0.008284328505396843, + 0.008218159899115562, + 0.0011174249229952693, + 0.013280070386826992, + -0.016780396923422813, + -0.018117006868124008, + -0.008185075595974922, + 0.04187159240245819, + -0.01504677627235651, + -0.009270243346691132, + 0.001905660261400044, + 0.005978347733616829, + -0.010725956410169601, + 0.02779087983071804, + -0.02470741607248783, + 0.019612420350313187, + 0.014477725140750408, + 0.012909525074064732, + -0.030781706795096397, + 0.00156571832485497, + -0.0004367138317320496, + 0.0045325602404773235, + -0.0028882664628326893, + 
-0.0019354362739250064, + 0.012883057817816734, + -0.0034937106538563967, + -0.016965670511126518, + -0.0073579661548137665, + 0.010335559956729412, + -0.014504192396998405, + 0.009687106125056744, + -0.009098204784095287, + 0.005042059347033501, + 0.016303982585668564, + -0.005766607355326414, + -0.007960101589560509, + -0.013015395030379295, + 0.019387446343898773, + -0.007688810117542744, + -0.0340636782348156, + 0.013194050639867783, + -0.010249541141092777, + -0.0027063023298978806, + -0.02440303936600685, + -0.021544547751545906, + -0.007715277373790741, + -0.012201519683003426, + -0.027579139918088913, + 0.009998099878430367, + -0.011215604841709137, + -0.009627554565668106, + -0.009740041568875313, + 0.022695884108543396, + 0.016965670511126518, + -0.006289340555667877, + 0.010811975225806236, + -0.00009982178016798571, + 0.0047409916296601295, + -0.019043369218707085, + 0.01875222660601139, + -0.0010893031721934676, + -0.012757336720824242, + -0.0036326649133116007, + 0.026308698579669, + -0.019982965663075447, + 0.00561111094430089, + 0.006709512323141098, + -0.0035367202945053577, + -0.024548610672354698, + -0.01095092948526144, + -0.018633123487234116, + -0.028690773993730545, + -0.001428418094292283, + 0.0034970189444720745, + 0.01872575841844082, + -0.005045367870479822, + -0.008747509680688381, + 0.0021207088138908148, + -0.00397674273699522, + -0.007523388136178255, + 0.016899501904845238, + -0.005085069220513105, + 0.001707153976894915, + 0.006216554902493954, + 0.0125588309019804, + 0.016965670511126518, + 0.03139045834541321, + -0.010414962656795979, + 0.025011790916323662, + -0.004066070541739464, + 0.02348990924656391, + 0.008588705211877823, + 0.0005347263067960739, + -0.001090130303055048, + 0.008032887242734432, + 0.00296270614489913, + 0.018805161118507385, + 0.013280070386826992, + 0.028108488768339157, + 0.018024370074272156, + -0.030040618032217026, + 0.009594470262527466, + -0.021412210538983345, + 0.004284427035599947, + -0.022325340658426285, + -0.005134695675224066, + -0.020631419494748116, + 0.018540486693382263, + -0.017283279448747635, + 0.020260874181985855, + -0.01013705413788557, + -0.0063455840572714806, + -0.02718212641775608, + 0.02596462145447731, + 0.028928982093930244, + -0.00362273957580328, + 0.005200864747166634, + -0.027102723717689514, + 0.0003021017473656684, + 0.001540904981084168, + -0.006666502449661493, + 0.0004892352735623717, + 0.011877292767167091, + -0.01936098001897335, + 0.025435270741581917, + 0.026388101279735565, + 0.01625104807317257, + 0.0013266836758702993, + -0.02379428595304489, + 0.019572719931602478, + 0.0007998148794285953, + -0.006759138777852058, + 0.02079022489488125, + 0.01875222660601139, + -0.01343887485563755, + -0.009084970690310001, + -0.009038652293384075, + 0.017653824761509895, + -0.0005322449724189937, + 0.0004834455030504614, + 0.014901204966008663, + 0.021862158551812172, + -0.021743055433034897, + 0.00880044512450695, + -0.0024763657711446285, + 0.01879192888736725, + 0.012651466764509678, + -0.032237421721220016, + -0.0025623852852731943, + -0.04028354212641716, + -0.007437368854880333, + -0.010825209319591522, + 0.001532633905299008, + 0.003086772747337818, + -0.01498060766607523, + 0.000961101264692843, + 0.007973335683345795, + 0.026057258248329163, + 0.013988075777888298, + 0.021200470626354218, + 0.00657717464491725, + 0.0018064071191474795, + 0.022867923602461815, + 0.03266090154647827, + -0.00858208816498518, + 0.002214999170973897, + -0.0069212522357702255, + 0.005260416306555271, + 
-0.005326585378497839, + 0.018236109986901283, + -0.006431603338569403, + 0.01181112416088581, + -0.027102723717689514, + -0.005300117656588554, + -0.005769915878772736, + -0.017309747636318207, + 0.002041306346654892, + 0.00694771995767951, + 0.016767164692282677, + -0.001715425169095397, + -0.0462387315928936, + 0.020671119913458824, + -0.0021240171045064926, + -0.0009238813072443008, + -0.024733882397413254, + -0.028664307668805122, + 0.0176008902490139, + -0.006739288102835417, + 0.0295906700193882, + -0.020194705575704575, + -0.0028948832768946886, + 0.021716587245464325, + -0.0003058237489312887, + -0.004582186695188284, + -0.013280070386826992, + 0.024297168478369713, + -0.005624344572424889, + 0.022934092208743095, + -0.016912735998630524, + -0.007940251380205154, + 0.018633123487234116, + -0.0051843225955963135, + -0.01979769393801689, + 0.01044143084436655, + 0.0010669712210074067, + -0.013551361858844757, + -0.031099317595362663, + -0.01262499950826168, + -0.026520438492298126, + -0.006219863425940275, + -0.01262499950826168, + -0.006424986757338047, + -0.023609014227986336, + 0.006841850001364946, + -0.022563546895980835, + -0.02765854261815548, + -0.0009329795138910413, + 0.02363548055291176, + -0.0011017099022865295, + -0.004347287584096193, + -0.007298414129763842, + -0.02092256210744381, + 0.014451256953179836, + 0.10227043926715851, + 0.021822458133101463, + 0.01607900857925415, + 0.0029031543526798487, + -0.01685979962348938, + -0.012843356467783451, + 0.005134695675224066, + -0.011354559101164341, + 0.008760743774473667, + 0.0021587559022009373, + 0.009065120480954647, + -0.004863403737545013, + -0.0062827239744365215, + 0.01581433415412903, + 0.029087787494063377, + -0.00011869021545862779, + 0.013882205821573734, + -0.004635121673345566, + -0.02268265187740326, + -0.02742033451795578, + 0.01614517718553543, + -0.015496723353862762, + 0.0170318391174078, + 0.007695426698774099, + -0.0034804767929017544, + -0.03581053391098976, + 0.013855738565325737, + 0.008403432555496693, + 0.0036822916008532047, + 0.0025359177961945534, + -0.024628013372421265, + 0.006418369710445404, + -0.0005996543914079666, + -0.0012547251535579562, + -0.006673119496554136, + -0.002754274755716324, + -0.003946966491639614, + 0.017217110842466354, + 0.021253405138850212, + 0.008575471118092537, + 0.014887970872223377, + 0.015734931454062462, + -0.0004342324973549694, + -0.019453614950180054, + 0.015509957447648048, + -0.00985252857208252, + -0.02190185897052288, + 0.027870282530784607, + 0.008092439733445644, + -0.0072123948484659195, + 0.04218920320272446, + 0.0018841554410755634, + -0.02481328509747982, + -0.012770570814609528, + 0.022391509264707565, + 0.0004168631858192384, + 0.012869823724031448, + 0.008641639724373817, + -0.010587001219391823, + 0.011956695467233658, + 0.006752522196620703, + -0.033772535622119904, + 0.007490303833037615, + -0.019215408712625504, + 0.0015417321119457483, + -0.028796644881367683, + -0.011493513360619545, + -0.007927017286419868, + -0.0030139870941638947, + -0.01605254039168358, + 0.010401729494333267, + -0.01118913758546114, + -0.009382730349898338, + -0.00607760064303875, + -0.005214098375290632, + -0.003311746520921588, + 0.025607310235500336, + -0.026308698579669, + -0.01281027216464281, + 0.005164471920579672, + 0.018672823905944824, + -0.01003780122846365, + -0.0062827239744365215, + -0.023106131702661514, + 0.0014648109208792448, + 0.010500982403755188, + -0.0007245479500852525, + -0.005346436053514481, + -0.008191692642867565, + -0.0036822916008532047, 
+ 0.030887577682733536, + -0.007410901132971048, + -0.006653268821537495, + 0.009528301656246185, + 0.025369102135300636, + 0.016198111698031425, + 0.013048479333519936, + 0.022325340658426285, + 0.003407691139727831, + 0.011936844326555729, + 0.010673020966351032, + -0.032925575971603394, + -0.0029742857441306114, + 0.0036855998914688826, + 0.00412231357768178, + -0.02081669121980667, + -0.00041789707029238343, + 0.02765854261815548, + -0.008343880996108055, + -0.037080973386764526, + -0.028240827843546867, + -0.014901204966008663, + -0.014755633659660816, + -0.019982965663075447, + 0.001999950734898448, + 0.005237257573753595, + 0.016105476766824722, + 0.024694181978702545, + -0.009945164434611797, + -0.02524999901652336, + -0.00761602446436882, + -0.02477358467876911, + 0.0034109996631741524, + 0.04830319434404373, + 0.006378668360412121, + -0.008032887242734432, + 0.007126375567167997, + -0.01408071257174015, + 0.02139897644519806, + 0.0023324487265199423, + -0.0026235913392156363, + 0.012380175292491913, + -0.021346041932702065, + 0.022285638377070427, + -0.024998556822538376, + 0.012704402208328247, + -0.015033542178571224, + -0.01426598522812128, + 0.01100386492908001, + -0.014940906316041946, + -0.021319573745131493, + 0.008469601161777973, + 0.00362273957580328, + -0.015589360147714615, + 0.01249266229569912, + -0.02149161323904991, + -0.016409853473305702, + 0.01125530619174242, + -0.02677188068628311, + 0.022867923602461815, + -0.006236405577510595, + 0.0063522011041641235, + -0.024164831265807152, + 0.012314006686210632, + -0.023436974734067917, + -0.03300497680902481, + -0.021412210538983345, + -0.007238862570375204, + 0.031205186620354652, + 0.009574620053172112, + 0.000023055677957017906, + -0.02274882048368454, + -0.0018014444503933191, + -0.00007692118379054591, + 0.003645898774266243, + 0.013935141265392303, + 0.00849606841802597, + -0.017018605023622513, + -0.037689726799726486, + 0.01963888853788376, + 0.014993840828537941, + 0.01844784989953041, + -0.00514462124556303, + -0.005174397025257349, + 0.0005814579781144857, + 0.008529153652489185, + -0.03056996688246727, + -0.01594667136669159, + -0.0054688481613993645, + -0.03117872029542923, + -0.013445491902530193, + -0.008191692642867565, + -0.015695229172706604, + -0.030278824269771576, + -0.015298217535018921, + -0.0295906700193882, + 0.03959538787603378, + -0.0036756745539605618, + -0.008859996683895588, + 0.0014780446654185653, + 0.024019259959459305, + 0.0035301034804433584, + 0.013882205821573734, + 0.005075144115835428, + 0.016026074066758156, + -0.006623493041843176, + -0.009568003006279469, + -0.0002574378449935466, + 0.0061834705993533134, + -0.005495315417647362, + 0.008449750952422619, + -0.004254651255905628, + -0.02057848498225212, + 0.003143016016110778, + -0.007113141939043999, + 0.048250261694192886, + 0.003258811542764306, + -0.01103694923222065, + -0.0025441888719797134, + -0.016939202323555946, + -0.012856590561568737, + -0.019453614950180054, + -0.0010496019385755062, + -0.04258621484041214, + -0.021888626739382744, + -0.010997247882187366, + 0.0011554720113053918, + 0.01597313955426216, + -0.014715932309627533, + -0.03639281913638115, + 0.027023321017622948, + -0.0007084193057380617, + 0.023066429421305656, + 0.034407757222652435, + 0.025210298597812653, + 0.02051231637597084, + -0.006967570632696152, + -0.0011910377070307732, + 0.023397274315357208, + 0.002413505455479026, + 0.0018808470340445638, + 0.024085428565740585, + -0.00772851100191474, + 0.007807913701981306, + -0.007285180501639843, + 
-0.020194705575704575, + -0.029564201831817627, + -0.030305292457342148, + -0.029564201831817627, + 0.0027079565916210413, + 0.004049527924507856, + 0.013816037215292454, + -0.009746658615767956, + -0.019175706431269646, + -0.006087525747716427, + 0.03239622339606285, + -0.011063416488468647, + 0.004119005519896746, + -0.008840146474540234, + -0.014226283878087997, + -0.006408444605767727, + -0.022193001583218575, + 0.00007263055158546194, + -0.009045269340276718, + -0.008231393992900848, + -0.007867465727031231, + 0.011950078420341015, + 0.007305031176656485, + 0.006722745951265097, + -0.014133647084236145, + 0.0007022159988991916, + 0.02068435400724411, + -0.014146881178021431, + -0.01349181029945612, + -0.0033762610983103514, + 0.01905660331249237, + -0.0007576323114335537, + 0.00001762777174008079, + 0.0034870936069637537, + 0.026414569467306137, + -0.0190301351249218, + 0.016224579885601997, + -0.007821147330105305, + 0.013366089202463627, + 0.010712722316384315, + 0.02109460160136223, + 0.003970125690102577, + -0.01933451183140278, + -0.012135351076722145, + 0.013551361858844757, + 0.02194156125187874, + 0.0017865565605461597, + -0.02003590017557144, + -0.042030397802591324, + -0.010792125016450882, + -0.015787865966558456, + -0.01929481141269207, + -0.013074947521090508, + -0.004267884884029627, + -0.022179769352078438, + -0.015364386141300201, + -0.0023539536632597446, + -0.007126375567167997, + 0.01764059066772461, + 0.003080155700445175, + 0.013108031824231148, + 0.00016707612667232752, + 0.023278169333934784, + -0.020776990801095963, + 0.016608359292149544, + -0.0036591324023902416, + 0.0248794537037611, + -0.023264937102794647, + -0.0011612616945058107, + 0.014782100915908813, + -0.021915093064308167, + 0.006994037888944149, + -0.012201519683003426, + -0.03819260746240616, + -0.012572064064443111, + -0.010024567134678364, + 0.00018868435290642083, + -0.005637578200548887, + 0.010679638013243675, + 0.012194902636110783, + -0.014213049784302711, + -0.005273650400340557, + -0.03297850862145424, + -0.01480856817215681, + 0.004191791173070669, + -0.0048799458891153336, + 0.023741351440548897, + 0.02027410827577114, + -0.003784853033721447, + 0.00249621644616127, + -0.019691823050379753, + 0.019850628450512886, + -0.0036326649133116007, + -0.021981261670589447, + -0.014120413921773434, + 0.002299364423379302, + -0.007344732526689768, + -0.009773125872015953, + -0.04086582735180855, + -0.042268604040145874, + -0.011645701713860035, + -0.012486045248806477, + 0.01675393059849739, + 0.02552790753543377, + -0.015695229172706604, + 0.013280070386826992, + 0.021610716357827187, + 0.02498532459139824, + -0.006692970171570778, + -0.00007547374116256833, + -0.028690773993730545, + 0.013868972659111023, + -0.00691463565453887, + -0.02257678098976612, + -0.021610716357827187, + -0.01511294487863779, + 0.040945228189229965, + 0.011506747454404831, + -0.021081367507576942, + -0.018394915387034416, + -0.006716129370033741, + -0.03131105750799179, + -0.016648059710860252, + -0.023304637521505356, + 0.014160114340484142, + 0.03676336258649826, + 0.0008949324837885797, + 0.029749475419521332, + 0.027843814343214035, + 0.014213049784302711, + -0.002797284396365285, + 0.008191692642867565, + -0.03713390976190567, + 0.008099055849015713, + -0.007563089486211538, + 0.012479428201913834, + 0.0039337328635156155, + -0.025077959522604942, + -0.011056799441576004, + 0.008383582346141338, + 0.008059355430305004, + -0.019281577318906784, + 0.013948374427855015, + -0.01940068043768406, + 0.0007770693628117442, 
+ -0.01820964366197586, + 0.022695884108543396, + 0.012234603986144066, + -0.01095092948526144, + 0.005988272838294506, + -0.02227240428328514, + -0.013723401352763176, + -0.013763101771473885, + -0.0022695884108543396, + 0.003781544743105769, + -0.004684748128056526, + 0.022232703864574432, + -0.006967570632696152, + 0.020260874181985855, + -0.0011215604608878493, + -0.020115302875638008, + -0.003989976365119219, + -0.01973152346909046, + 0.011738338507711887, + -0.02450890839099884, + 0.005118153523653746, + 0.021372510120272636, + 0.00581292575225234, + -0.01905660331249237, + -0.009389347396790981, + 0.016581891104578972, + -0.009634171612560749, + -0.002330794697627425, + 0.0010934387100860476, + 0.0007625949801877141, + 0.026890983805060387, + 0.004936189390718937, + 0.021200470626354218, + -0.03371959924697876, + 0.013055096380412579, + -0.015509957447648048, + 0.01942714862525463, + -0.016740696504712105, + 0.03819260746240616, + 0.010924462229013443, + -0.00802627019584179, + -0.013531511649489403, + -0.01240664254873991, + -0.011109734885394573, + 0.0031380534637719393, + -0.00836373120546341, + -0.0023142523132264614, + -0.022351806983351707, + -0.01757442206144333, + -0.013445491902530193, + 0.0024912538938224316, + -0.01597313955426216, + 0.028240827843546867, + 0.0010008024983108044, + 0.000474347296403721, + 0.030305292457342148, + 0.21893920004367828, + -0.007000654935836792, + 0.013604297302663326, + 0.004436615388840437, + -0.027949685230851173, + 0.0190301351249218, + 0.012545596808195114, + 0.002049577422440052, + 0.0032373066060245037, + -0.005551558919250965, + -0.008032887242734432, + -0.0010802049655467272, + 0.028717242181301117, + -0.005187630653381348, + -0.015311451628804207, + -0.029537735506892204, + -0.0115596828982234, + 0.003920499235391617, + -0.012194902636110783, + -0.011685403063893318, + -0.006861700676381588, + -0.0001154851634055376, + 0.014186582528054714, + -0.014160114340484142, + 0.014173348434269428, + 0.01861988939344883, + 0.007093291264027357, + -0.004919647239148617, + 0.006894784979522228, + 0.012095649726688862, + -0.015073243528604507, + -0.041421644389629364, + 0.004175249021500349, + 0.0010504290694370866, + 0.0046483553014695644, + -0.010328943841159344, + -0.018355214968323708, + 0.012836739420890808, + 0.0038543303962796926, + 0.004658280871808529, + -0.005720289424061775, + 0.014755633659660816, + 0.0013672120403498411, + 0.005210789851844311, + 0.014332153834402561, + 0.02352961152791977, + -0.003121511312201619, + -0.012108882889151573, + 0.006051132921129465, + 0.026798348873853683, + -0.024548610672354698, + 0.004370446782559156, + 0.030358226969838142, + 0.020049134269356728, + -0.019784459844231606, + 0.0031794088426977396, + 0.0218356903642416, + -0.009204074740409851, + 0.003851021872833371, + 0.003672366263344884, + 0.015827568247914314, + 0.010309092700481415, + -0.013736634515225887, + 0.02420453168451786, + -0.000696839764714241, + 0.023675182834267616, + -0.025236764922738075, + -0.020935796201229095, + 0.024760350584983826, + -0.012214752845466137, + -0.0018378372769802809, + -0.011652318760752678, + -0.02000943385064602, + 0.006451454013586044, + -0.021782755851745605, + -0.03107284940779209, + 0.0200756024569273, + 0.005118153523653746, + 0.018871329724788666, + -0.0006869144272059202, + 0.004046219866722822, + -0.013055096380412579, + 0.01644955389201641, + 0.004119005519896746, + -0.01581433415412903, + -0.027711477130651474, + 0.033190250396728516, + -0.013736634515225887, + 0.0033134007826447487, + 
0.025633778423070908, + -0.0037716194055974483, + -0.037425052374601364, + 0.0058923279866576195, + 0.012234603986144066, + -0.009792977012693882, + 0.01818317547440529, + 0.019149240106344223, + 0.0019238566746935248, + -0.027817346155643463, + -0.028002619743347168, + -0.013101414777338505, + 0.011586150154471397, + 0.003262119833379984, + -0.0045027839951217175, + -0.01875222660601139, + 0.018394915387034416, + -0.016409853473305702, + 0.004578878171741962, + 0.019202174618840218, + -0.020287342369556427, + -0.007721894420683384, + -0.020062368363142014, + 0.009032036177814007, + -0.014411555603146553, + 0.002529300982132554, + 0.012175051495432854, + 0.01426598522812128, + -0.006815382279455662, + 0.02251061238348484, + -0.021266639232635498, + 0.0033828779123723507, + -0.031972743570804596, + 0.004760842304676771, + -0.008912932127714157, + 0.01181112416088581, + -0.025607310235500336, + -0.019255109131336212, + 0.03144339472055435, + 0.014504192396998405, + -0.014517426490783691, + -0.003860947210341692, + -0.009084970690310001, + 0.008826912380754948, + -0.008013037033379078, + -0.011248689144849777, + 0.02403249405324459, + 0.016595125198364258, + -0.005941954907029867, + -0.011182520538568497, + 0.01651572249829769, + 0.0028105180244892836, + 0.005118153523653746, + 0.01225445419549942, + 0.02470741607248783, + 0.01838168129324913, + -0.023860454559326172, + 0.03411661460995674, + 0.0001505339314462617, + -0.021372510120272636, + -0.022537080571055412, + -0.035995807498693466, + -0.011056799441576004, + -0.013485193252563477, + -0.0023622247390449047, + 0.010752423666417599, + -0.0037881615571677685, + -0.006385285407304764, + -0.023913390934467316, + -0.015576126053929329, + 0.02924659289419651, + -0.0347253642976284, + 0.036578088998794556, + 0.007152842823415995, + -0.03194627910852432, + -0.024733882397413254, + -0.0036095059476792812, + -0.16981551051139832, + 0.005051984917372465, + 0.0259249210357666, + -0.033057913184165955, + 0.03416954725980759, + 0.01318743359297514, + 0.0040726871229708195, + -0.005597877316176891, + -0.0010446392698213458, + 0.0004325782647356391, + 0.022695884108543396, + 0.0003378742258064449, + -0.030172955244779587, + -0.00022249245375860482, + -0.007735128048807383, + 0.01644955389201641, + -0.023807520046830177, + 0.015192347578704357, + 0.03056996688246727, + 0.0012208136031404138, + 0.019572719931602478, + -0.004545793868601322, + -0.005339819006621838, + 0.013088180683553219, + 0.02718212641775608, + 0.00023613976372871548, + -0.0014780446654185653, + 0.0005930375191383064, + 0.009422431699931622, + 0.014504192396998405, + -0.012082415632903576, + -0.024800051003694534, + 0.03993946313858032, + 0.014041011221706867, + 0.026361634954810143, + 0.011175903491675854, + -0.011910377070307732, + 0.00040838532731868327, + -0.01672746241092682, + 0.01631721667945385, + 0.012227986939251423, + 0.025541141629219055, + 0.012519129551947117, + -0.015920203179121017, + 0.027261529117822647, + 0.03676336258649826, + -0.008185075595974922, + 0.019559485837817192, + 0.021623950451612473, + -0.02559407614171505, + 0.008588705211877823, + -0.027579139918088913, + -0.00042513429070822895, + 0.004936189390718937, + 0.03689569979906082, + 0.017203878611326218, + 0.009250393137335777, + -0.00312316557392478, + 0.004707907326519489, + -0.023198766633868217, + -0.014530659653246403, + 0.0009247084381058812, + -0.006233097054064274, + -0.027605606243014336, + -0.01794496737420559, + -0.008383582346141338, + 0.0033845321740955114, + 0.007715277373790741, + 
-0.02498532459139824, + 0.01140749454498291, + -0.0025508056860417128, + 0.004727758001536131, + 0.028082022443413734, + -0.024151597172021866, + -0.0026186287868767977, + 0.011844208464026451, + -0.01365723181515932, + 0.016740696504712105, + -0.011566299013793468, + 0.015364386141300201, + -0.013683700002729893, + 0.015430554747581482, + -0.008443133905529976, + -0.00135645957197994, + 0.021676886826753616, + -0.008251244202256203, + -0.010507599450647831, + -0.023754585534334183, + -0.021915093064308167, + -0.021690119057893753, + -0.015893736854195595, + -0.0008118079858832061, + -0.012049331329762936, + -0.004529251717031002, + -0.02142544463276863, + 0.03678983077406883, + 0.008370348252356052, + -0.014755633659660816, + 0.009356263093650341, + -0.017521487548947334, + -0.006616875994950533, + -0.014702698215842247, + -0.005743448622524738, + 0.008992334827780724, + 0.016158411279320717, + 0.03128458932042122, + 0.008840146474540234, + 0.018474318087100983, + 0.002539226086810231, + -0.016290748491883278, + -0.015721697360277176, + 0.007033739238977432, + 0.013207284733653069, + 0.020737290382385254, + 0.006180162075906992, + -0.0011993087828159332, + 0.009243776090443134, + -0.008979100733995438, + 0.00137300172355026, + -0.004099154844880104, + 0.06770387291908264, + -0.001981754321604967, + -0.028055554255843163, + -0.013465343043208122, + -0.007688810117542744, + -0.025779349729418755, + -0.10147640854120255, + 0.0015905315522104502, + 0.016978904604911804, + 0.01672746241092682, + 0.0013167583383619785, + 0.02305319532752037, + 0.0011645702179521322, + -0.0022431209217756987, + 0.008859996683895588, + 0.02379428595304489, + -0.0027658541221171618, + -0.04891194775700569, + 0.0024680946953594685, + -0.014689465053379536, + 0.017322981730103493, + -0.021915093064308167, + -0.009157756343483925, + -0.02718212641775608, + 0.01790526695549488, + 0.02278852090239525, + -0.0002528887416701764, + 0.004936189390718937, + 0.008145374245941639, + -0.0026103577110916376, + -0.0038245543837547302, + -0.008959250524640083, + -0.031231654807925224, + 0.004820394329726696, + 0.009812827222049236, + -0.007827764376997948, + 0.012075798586010933, + -0.004132239148020744, + 0.021571015939116478, + -0.008707809261977673, + -0.008979100733995438, + 0.004949423484504223, + -0.014967373572289944, + 0.00898571778088808, + 0.04012473672628403, + -0.014954139478504658, + 0.007000654935836792, + 0.011930227279663086, + -0.006256256252527237, + -0.03446068987250328, + 0.003989976365119219, + -0.0038907232228666544, + -0.015337918885052204, + 0.011652318760752678, + 0.003844405058771372, + -0.0240721944719553, + -0.024972090497612953, + -0.01566876284778118, + -0.031363993883132935, + 0.002392000751569867, + 0.023238468915224075, + -0.01940068043768406, + 0.0007410901016555727, + 0.006540781818330288, + 0.004327436909079552, + 0.012909525074064732, + -0.004605345893651247, + -0.014424789696931839, + -0.015377620235085487, + -0.004446540493518114, + -0.009481983259320259, + -0.011632468551397324, + -0.011195754632353783, + -0.013683700002729893, + 0.02014177106320858, + 0.027235060930252075, + -0.017349449917674065, + 0.0070668235421180725, + 0.0036756745539605618, + 0.007960101589560509, + -0.029908278957009315, + -0.00973342452198267, + -0.007139609195291996, + -0.004056144971400499, + 0.006080909166485071, + -0.036710429936647415, + 0.010686255060136318, + -0.00836373120546341, + 0.024892687797546387, + -0.022523846477270126, + 0.01107003353536129, + 0.016343683004379272, + 0.0074572195298969746, + 
0.021240172907710075, + 0.028293762356042862, + -0.012280922383069992, + 0.010368645191192627, + 0.019612420350313187, + -0.00612722709774971, + -0.02505149319767952, + -0.005257108248770237, + -0.011883909814059734, + 0.014662997797131538, + -0.007318264804780483, + -0.014530659653246403, + 0.018196409568190575, + -0.014504192396998405, + 0.0007634220528416336, + -0.07834381610155106, + 0.04660927504301071, + 0.014146881178021431, + 0.0037087590899318457, + 0.007556472439318895, + -0.00017472688341513276, + 0.023926623165607452, + -0.009435664862394333, + -0.001262169098481536, + -0.010276008397340775, + -0.00014019505761098117, + 0.017045073211193085, + -0.007086674217134714, + 0.007880699820816517, + -0.010864910669624805, + 0.0020462688989937305, + -0.009501834399998188, + -0.006874934304505587, + 0.002540880348533392, + -0.007523388136178255, + -0.011268540285527706, + 0.010335559956729412, + -0.0032803162466734648, + 0.02390015684068203, + -0.015218814834952354, + 0.004860095679759979, + 0.0011811123695224524, + 0.005376211833208799, + -0.007384433876723051, + -0.008343880996108055, + 0.014199815690517426, + 0.008376965299248695, + -0.003113240236416459, + 0.019387446343898773, + -0.007490303833037615, + -0.01872575841844082, + -0.016370151191949844, + 0.011109734885394573, + 0.015999605879187584, + 0.0451006256043911, + -0.016462787985801697, + -0.02494562231004238, + 0.030728772282600403, + -0.024932388216257095, + -0.005846010055392981, + 0.02116077020764351, + -0.017150942236185074, + 0.0015648911939933896, + 0.017375916242599487, + -0.000653830065857619, + 0.01905660331249237, + 0.01511294487863779, + -0.01655542477965355, + -0.01746855303645134, + -0.00479061808437109, + -0.029987681657075882, + 0.035122379660606384, + -0.017322981730103493, + 0.006530856713652611, + -0.009369496256113052, + 0.03493710607290268, + -0.01262499950826168, + 0.01807730458676815, + -0.016780396923422813, + 0.027023321017622948, + 0.02325170300900936, + -0.012499278411269188, + 0.0040032099932432175, + -0.00549200689420104, + -0.005343127530068159, + -0.027579139918088913, + -0.010785507969558239, + 0.024178065359592438, + 0.013134499080479145, + 0.017958201467990875, + 0.028955450281500816, + -0.014054244384169579, + 0.006908018607646227, + -0.0032555030193179846, + -0.0033266344107687473, + 0.017693527042865753, + 0.004896488506346941, + -0.045921120792627335, + 0.006269489880651236, + 0.007530004717409611, + 0.007252096198499203, + -0.007735128048807383, + 0.011698637157678604, + 0.009442281909286976, + -0.001176149700768292, + -0.01206918153911829, + 0.013816037215292454, + 0.002865107264369726, + 0.00932979490607977, + 0.005859243683516979, + 0.022722352296113968, + 0.007516771089285612, + -0.014967373572289944, + 0.025474973022937775, + -0.00427450193092227, + -0.0015930129447951913, + -0.014835036359727383, + 0.008773977868258953, + -0.013670465908944607, + -0.026493972167372704, + -0.0031876801513135433, + -0.023847220465540886, + -0.029193656519055367, + -0.0005053639179095626, + 0.030040618032217026, + 0.01922864094376564, + 0.020327042788267136, + 0.003903956850990653, + 0.017018605023622513, + -0.006325733382254839, + 0.0054688481613993645, + 0.022285638377070427, + -0.01364399865269661, + -0.017018605023622513, + 0.032952044159173965, + 0.018540486693382263, + 0.008264478296041489, + 0.010375261306762695, + -0.025025025010108948, + 0.02779087983071804, + -0.0032736994326114655, + 0.024363337084650993, + -0.008754126727581024, + -0.0012216407340019941, + 0.01028924249112606, + 
0.03459302708506584, + -0.005134695675224066, + -0.03046409785747528, + 0.009931931272149086, + -0.0006112339324317873, + -0.003662440925836563, + -0.004582186695188284, + 0.03483123704791069, + -0.018566954880952835, + 0.055740565061569214, + 0.011956695467233658, + 0.0008998951525427401, + 0.006170236971229315, + -0.00614376924932003, + 0.01996973156929016, + 0.003741843393072486, + 0.020604951307177544, + -0.005882402881979942, + -0.007450602483004332, + 0.0072785634547472, + -0.006236405577510595, + 0.03607520833611488, + 0.017799396067857742, + -0.02555437572300434, + 0.0034109996631741524, + 0.0027493119705468416, + 0.019625654444098473, + -0.012433109804987907, + -0.008661490865051746, + 0.010123820044100285, + 0.008714425377547741, + 0.02674541249871254, + -0.008879847824573517, + -0.020631419494748116, + 0.008879847824573517, + 0.021676886826753616, + 0.0034606261178851128, + -0.012261071242392063, + -0.011731721460819244, + 0.002350645139813423, + -0.028479034081101418, + -0.029802409932017326, + -0.01879192888736725, + 0.01803760416805744, + -0.02461477927863598, + -0.001672415412031114, + -0.01516588032245636, + 0.006633418146520853, + 0.00425795977935195, + 0.0013680391712114215, + 0.033666666597127914, + -0.01824934408068657, + -0.00450940104201436, + 0.005928720813244581, + -0.004314203280955553, + 0.011612617410719395, + -0.015827568247914314, + -0.0006145423394627869, +] diff --git a/examples/embedding_biology.py b/examples/data/embedding_biology.py similarity index 100% rename from examples/embedding_biology.py rename to examples/data/embedding_biology.py diff --git a/examples/database_operations/create_fulltext_index.py b/examples/database_operations/create_fulltext_index.py new file mode 100644 index 00000000..93920cc6 --- /dev/null +++ b/examples/database_operations/create_fulltext_index.py @@ -0,0 +1,12 @@ +import neo4j +from neo4j_graphrag.indexes import create_fulltext_index + +NEO4J_URL = "neo4j://localhost:7687" +NEO4J_AUTH = ("neo4j", "password") +FULLTEXT_INDEX_NAME = "fulltext_index" + +driver = neo4j.GraphDatabase.driver(NEO4J_URL, auth=NEO4J_AUTH) + +create_fulltext_index( + driver, FULLTEXT_INDEX_NAME, label="Document", node_properties=["textProperty"] +) diff --git a/examples/database_operations/create_vector_index.py b/examples/database_operations/create_vector_index.py new file mode 100644 index 00000000..bfc82d05 --- /dev/null +++ b/examples/database_operations/create_vector_index.py @@ -0,0 +1,18 @@ +import neo4j +from neo4j_graphrag.indexes import create_vector_index + +NEO4J_URL = "neo4j://localhost:7687" +NEO4J_AUTH = ("neo4j", "password") +INDEX_NAME = "vector_index" +DIMENSION = 1536 + +driver = neo4j.GraphDatabase.driver(NEO4J_URL, auth=NEO4J_AUTH) + +create_vector_index( + driver, + INDEX_NAME, + label="Document", + embedding_property="vectorProperty", + dimensions=DIMENSION, + similarity_fn="euclidean", +) diff --git a/examples/database_operations/populate_vector_index.py b/examples/database_operations/populate_vector_index.py new file mode 100644 index 00000000..1bf0a60f --- /dev/null +++ b/examples/database_operations/populate_vector_index.py @@ -0,0 +1,13 @@ +import neo4j +from neo4j_graphrag.indexes import upsert_vector + +NEO4J_URL = "neo4j://localhost:7687" +NEO4J_AUTH = ("neo4j", "password") + +driver = neo4j.GraphDatabase.driver(NEO4J_URL, auth=NEO4J_AUTH) + +id = 1 +embedding_property = "embedding" +vector = [1.0, 2.0, 3.0] + +upsert_vector(driver, id, embedding_property, vector) diff --git a/examples/graphrag_with_langchain_llm.py 
b/examples/graphrag_with_langchain_llm.py deleted file mode 100644 index 3fe87c2f..00000000 --- a/examples/graphrag_with_langchain_llm.py +++ /dev/null @@ -1,58 +0,0 @@ -"""End to end example of building a RAG pipeline backed by a Neo4j database. -Requires OPENAI_API_KEY to be in the env var. - -This example illustrates: -- VectorCypherRetriever with a custom formatter function to extract relevant - context from neo4j result -- Logging configuration -""" - -import logging - -import neo4j -from neo4j_graphrag.embeddings.openai import OpenAIEmbeddings -from neo4j_graphrag.generation import GraphRAG -from neo4j_graphrag.llm import OpenAILLM -from neo4j_graphrag.retrievers import VectorCypherRetriever -from neo4j_graphrag.types import RetrieverResultItem - -URI = "neo4j://localhost:7687" -AUTH = ("neo4j", "password") -DATABASE = "neo4j" -INDEX = "moviePlotsEmbedding" - - -# setup logger config -logger = logging.getLogger("neo4j_graphrag") -logging.basicConfig(format="%(asctime)s - %(message)s") -logger.setLevel(logging.DEBUG) - - -def formatter(record: neo4j.Record) -> RetrieverResultItem: - return RetrieverResultItem(content=f'{record.get("title")}: {record.get("plot")}') - - -driver = neo4j.GraphDatabase.driver( - URI, - auth=AUTH, - database=DATABASE, -) - -embedder = OpenAIEmbeddings() - -retriever = VectorCypherRetriever( - driver, - index_name=INDEX, - retrieval_query="with node, score return node.title as title, node.plot as plot", - format_record_function=formatter, - embedder=embedder, # type: ignore -) - -llm = OpenAILLM(model_name="gpt-4o", model_params={"temperature": 0}) - -rag = GraphRAG(retriever=retriever, llm=llm) - -result = rag.search("Tell me more about Avatar movies") -print(result.answer) - -driver.close() diff --git a/examples/graphrag_with_mistral.py b/examples/graphrag_with_mistral.py deleted file mode 100644 index ae710e6b..00000000 --- a/examples/graphrag_with_mistral.py +++ /dev/null @@ -1,68 +0,0 @@ -"""End to end example of building a RAG pipeline backed by a Neo4j database. -Requires MISTRAL_API_KEY to be in the env var. 
- -This example illustrates: -- VectorCypherRetriever with a custom formatter function to extract relevant - context from neo4j result -- Logging configuration -""" - -import logging - -import neo4j -from neo4j_graphrag.embeddings.mistral import MistralAIEmbeddings -from neo4j_graphrag.generation import GraphRAG -from neo4j_graphrag.indexes import create_vector_index -from neo4j_graphrag.llm.mistralai_llm import MistralAILLM -from neo4j_graphrag.retrievers import VectorCypherRetriever -from neo4j_graphrag.types import RetrieverResultItem - -URI = "neo4j://localhost:7687" -AUTH = ("neo4j", "password") -DATABASE = "neo4j" -INDEX_NAME = "moviePlotsEmbedding" - - -# setup logger config -logger = logging.getLogger("neo4j_graphrag") -logging.basicConfig(format="%(asctime)s - %(message)s") -logger.setLevel(logging.DEBUG) - - -def formatter(record: neo4j.Record) -> RetrieverResultItem: - return RetrieverResultItem(content=f'{record.get("title")}: {record.get("plot")}') - - -driver = neo4j.GraphDatabase.driver( - URI, - auth=AUTH, -) - -create_vector_index( - driver, - INDEX_NAME, - label="Document", - embedding_property="vectorProperty", - dimensions=1024, - similarity_fn="cosine", -) - - -embedder = MistralAIEmbeddings() - -retriever = VectorCypherRetriever( - driver, - index_name=INDEX_NAME, - retrieval_query="with node, score return node.title as title, node.plot as plot", - result_formatter=formatter, - embedder=embedder, -) - -llm = MistralAILLM(model_name="mistral-small-latest") - -rag = GraphRAG(retriever=retriever, llm=llm) - -result = rag.search("Tell me more about Avatar movies") -print(result.answer) - -driver.close() diff --git a/examples/pipeline/kg_builder.py b/examples/kg_builder.py similarity index 82% rename from examples/pipeline/kg_builder.py rename to examples/kg_builder.py index 2d587383..25917101 100644 --- a/examples/pipeline/kg_builder.py +++ b/examples/kg_builder.py @@ -12,6 +12,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+ +## ********************************************* +# WARNING: this example is linked from the doc +# page do not delete until the docs are updated +## ********************************************** + from __future__ import annotations import asyncio @@ -33,12 +39,14 @@ FixedSizeSplitter, ) from neo4j_graphrag.experimental.pipeline.pipeline import PipelineResult -from neo4j_graphrag.llm import OpenAILLM +from neo4j_graphrag.llm import LLMInterface, OpenAILLM logging.basicConfig(level=logging.INFO) -async def main(neo4j_driver: neo4j.Driver) -> PipelineResult: +async def define_and_run_pipeline( + neo4j_driver: neo4j.AsyncDriver, llm: LLMInterface +) -> PipelineResult: from neo4j_graphrag.experimental.pipeline import Pipeline # Instantiate Entity and Relation objects @@ -86,13 +94,7 @@ async def main(neo4j_driver: neo4j.Driver) -> PipelineResult: pipe.add_component(SchemaBuilder(), "schema") pipe.add_component( LLMEntityRelationExtractor( - llm=OpenAILLM( - model_name="gpt-4o", - model_params={ - "max_tokens": 2000, - "response_format": {"type": "json_object"}, - }, - ), + llm=llm, on_error=OnError.RAISE, ), "extractor", @@ -127,8 +129,23 @@ async def main(neo4j_driver: neo4j.Driver) -> PipelineResult: return await pipe.run(pipe_inputs) -if __name__ == "__main__": - with neo4j.GraphDatabase.driver( +async def main() -> PipelineResult: + llm = OpenAILLM( + model_name="gpt-4o", + model_params={ + "max_tokens": 2000, + "response_format": {"type": "json_object"}, + }, + ) + driver = neo4j.AsyncGraphDatabase.driver( "bolt://localhost:7687", auth=("neo4j", "password") - ) as driver: - print(asyncio.run(main(driver))) + ) + res = await define_and_run_pipeline(driver, llm) + await driver.close() + await llm.async_client.close() + return res + + +if __name__ == "__main__": + res = asyncio.run(main()) + print(res) diff --git a/examples/pipeline/__init__.py b/examples/old/pipeline/__init__.py similarity index 100% rename from examples/pipeline/__init__.py rename to examples/old/pipeline/__init__.py diff --git a/examples/pipeline/kg_builder_example.py b/examples/old/pipeline/kg_builder_example.py similarity index 100% rename from examples/pipeline/kg_builder_example.py rename to examples/old/pipeline/kg_builder_example.py diff --git a/examples/pipeline/kg_builder_two_documents_entity_resolution.py b/examples/old/pipeline/kg_builder_two_documents_entity_resolution.py similarity index 100% rename from examples/pipeline/kg_builder_two_documents_entity_resolution.py rename to examples/old/pipeline/kg_builder_two_documents_entity_resolution.py diff --git a/examples/openai_search.py b/examples/openai_search.py deleted file mode 100644 index f4d7e2ae..00000000 --- a/examples/openai_search.py +++ /dev/null @@ -1,51 +0,0 @@ -from random import random - -from neo4j import GraphDatabase -from neo4j_graphrag.embeddings.openai import OpenAIEmbeddings -from neo4j_graphrag.indexes import create_vector_index -from neo4j_graphrag.retrievers import VectorRetriever - -URI = "neo4j://localhost:7687" -AUTH = ("neo4j", "password") - -INDEX_NAME = "embedding-name-large" -DIMENSION = 3072 - -# Connect to Neo4j database -driver = GraphDatabase.driver(URI, auth=AUTH) - - -# Create Embedder object -embedder = OpenAIEmbeddings(model="text-embedding-3-large") - -# Initialize the retriever -retriever = VectorRetriever(driver, INDEX_NAME, embedder) - -# Creating the index -create_vector_index( - driver, - INDEX_NAME, - label="Document", - embedding_property="vectorProperty", - dimensions=DIMENSION, - similarity_fn="cosine", -) - -# 
Upsert the query -vector = [random() for _ in range(DIMENSION)] - -insert_query = ( - "MERGE (n:Document {id: $id})" - "WITH n " - "CALL db.create.setNodeVectorProperty(n, 'vectorProperty', $vector)" - "RETURN n" -) -parameters = { - "id": 0, - "vector": vector, -} -driver.execute_query(insert_query, parameters) - -# Perform the similarity search for a text query -query_text = "Find me a book about Fremen" -print(retriever.search(query_text=query_text, top_k=5)) diff --git a/examples/pipeline/rag.py b/examples/pipeline/rag.py deleted file mode 100644 index 75b66af9..00000000 --- a/examples/pipeline/rag.py +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright (c) "Neo4j" -# Neo4j Sweden AB [https://neo4j.com] -# # -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# # -# https://www.apache.org/licenses/LICENSE-2.0 -# # -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -"""This example illustrates how to use a Pipeline with -the existing Retriever and LLM interfaces. It consists -in creating a Component wrapper around the required -objects. -""" - -from __future__ import annotations - -import asyncio -from typing import List - -import neo4j -from neo4j_graphrag.embeddings.openai import OpenAIEmbeddings -from neo4j_graphrag.experimental.pipeline import Component, Pipeline -from neo4j_graphrag.experimental.pipeline.component import DataModel -from neo4j_graphrag.experimental.pipeline.pipeline import PipelineResult -from neo4j_graphrag.experimental.pipeline.types import ( - ComponentConfig, - ConnectionConfig, - PipelineConfig, -) -from neo4j_graphrag.generation import PromptTemplate, RagTemplate -from neo4j_graphrag.llm import LLMInterface, OpenAILLM -from neo4j_graphrag.retrievers import VectorRetriever -from neo4j_graphrag.retrievers.base import Retriever - - -class ComponentResultDataModel(DataModel): - """A simple DataModel with a single text field""" - - text: str - - -class RetrieverComponent(Component): - def __init__(self, retriever: Retriever) -> None: - self.retriever = retriever - - async def run(self, query: str) -> ComponentResultDataModel: - res = self.retriever.search(query_text=query) - return ComponentResultDataModel(text="\n".join(c.content for c in res.items)) - - -class PromptTemplateComponent(Component): - def __init__(self, prompt: PromptTemplate) -> None: - self.prompt = prompt - - async def run(self, query: str, context: List[str]) -> ComponentResultDataModel: - prompt = self.prompt.format(query, context, examples="") - return ComponentResultDataModel(text=prompt) - - -class LLMComponent(Component): - def __init__(self, llm: LLMInterface) -> None: - self.llm = llm - - async def run(self, prompt: str) -> ComponentResultDataModel: - llm_response = self.llm.invoke(prompt) - return ComponentResultDataModel(text=llm_response.content) - - -if __name__ == "__main__": - driver = neo4j.GraphDatabase.driver( - "bolt://localhost:7687", - auth=("neo4j", "password"), - database="neo4j", - ) - embedder = OpenAIEmbeddings() - retriever = VectorRetriever( - driver, index_name="moviePlotsEmbedding", embedder=embedder - ) - prompt_template = RagTemplate() - llm = OpenAILLM(model_name="gpt-4o") - - pipe = 
Pipeline.from_template( - PipelineConfig( - components=[ - ComponentConfig( - name="retrieve", component=RetrieverComponent(retriever) - ), - ComponentConfig( - name="augment", component=PromptTemplateComponent(prompt_template) - ), - ComponentConfig(name="generate", component=LLMComponent(llm)), - ], - connections=[ - ConnectionConfig( - start="retrieve", - end="augment", - input_config={"context": "retrieve.text"}, - ), - ConnectionConfig( - start="augment", - end="generate", - input_config={"prompt": "augment.text"}, - ), - ], - ) - ) - - query = "A movie about the US presidency" - pipe_output: PipelineResult = asyncio.run( - pipe.run({"retrieve": {"query": query}, "augment": {"query": query}}) - ) - print(pipe_output.result["generate"]["text"]) - - driver.close() diff --git a/examples/qdrant/__init__.py b/examples/qdrant/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/examples/graphrag.py b/examples/question_answering/graphrag.py similarity index 100% rename from examples/graphrag.py rename to examples/question_answering/graphrag.py diff --git a/examples/retrieve/hybrid_cypher_retriever.py b/examples/retrieve/hybrid_cypher_retriever.py new file mode 100644 index 00000000..232e0c43 --- /dev/null +++ b/examples/retrieve/hybrid_cypher_retriever.py @@ -0,0 +1,50 @@ +"""This example uses an example Movie database where movies' plots are embedded +using OpenAI embeddings. OPENAI_API_KEY needs to be set in the environment for +this example to run. + +Also requires minimal Cypher knowledge to write the retrieval query. + +It shows how to use a hybrid retriever to find context +similar to a query **text** using vector+text similarity +and graph traversal. +""" + +import neo4j +from neo4j_graphrag.embeddings.openai import OpenAIEmbeddings +from neo4j_graphrag.retrievers import HybridCypherRetriever + +# Define database credentials +URI = "neo4j+s://demo.neo4jlabs.com" +AUTH = ("recommendations", "recommendations") +DATABASE = "recommendations" +INDEX_NAME = "moviePlotsEmbedding" +FULLTEXT_INDEX_NAME = "movieFulltext" + +# for each Movie node matched by the vector search, retrieve more context: +# the name of all actors starring in that movie +RETRIEVAL_QUERY = " MATCH (node)<-[:ACTED_IN]-(p:Person) RETURN node.title as movieTitle, node.plot as moviePlot, collect(p.name) as actors, score as similarityScore" + +with neo4j.GraphDatabase.driver(URI, auth=AUTH, database=DATABASE) as driver: + # Initialize the retriever + retriever = HybridCypherRetriever( + driver=driver, + vector_index_name=INDEX_NAME, + fulltext_index_name=FULLTEXT_INDEX_NAME, + # note: embedder is optional if you only use query_vector + embedder=OpenAIEmbeddings(), + retrieval_query=RETRIEVAL_QUERY, + # optionally, configure how to format the results + # (see corresponding example in 'customize' directory) + # result_formatter=None, + # optionally, set neo4j database + # neo4j_database="neo4j", + ) + + # Perform the similarity search for a text query + # (retrieve the top 5 most similar nodes) + query_text = "Who were the actors in Avatar?" + print(retriever.search(query_text=query_text, top_k=5)) + + # note: it is also possible to query from a query_vector directly: + # query_vector: list[float] = [...] 
+ # retriever.search(query_vector=query_vector, top_k=5) diff --git a/examples/retrieve/hybrid_retriever.py b/examples/retrieve/hybrid_retriever.py new file mode 100644 index 00000000..df9e1d29 --- /dev/null +++ b/examples/retrieve/hybrid_retriever.py @@ -0,0 +1,44 @@ +"""This example uses an example Movie database where movies' plots are embedded +using OpenAI embeddings. OPENAI_API_KEY needs to be set in the environment for +this example to run. + +It shows how to use a hybrid retriever to find context +similar to a query **text** using vector+text similarity. +""" + +import neo4j +from neo4j_graphrag.embeddings.openai import OpenAIEmbeddings +from neo4j_graphrag.retrievers import HybridRetriever + +# Define database credentials +URI = "neo4j+s://demo.neo4jlabs.com" +AUTH = ("recommendations", "recommendations") +DATABASE = "recommendations" +INDEX_NAME = "moviePlotsEmbedding" +FULLTEXT_INDEX_NAME = "movieFulltext" + + +with neo4j.GraphDatabase.driver(URI, auth=AUTH, database=DATABASE) as driver: + # Initialize the retriever + retriever = HybridRetriever( + driver=driver, + vector_index_name=INDEX_NAME, + fulltext_index_name=FULLTEXT_INDEX_NAME, + embedder=OpenAIEmbeddings(), + # optionally, provide a list of properties to fetch (default fetch all) + # return_properties=[], + # optionally, configure how to format the results + # (see corresponding example in 'customize' directory) + # result_formatter=None, + # optionally, set neo4j database + # neo4j_database="neo4j", + ) + + # Perform the similarity search for a text query + # (retrieve the top 5 most similar nodes) + query_text = "Find me a movie about aliens" + print(retriever.search(query_text=query_text, top_k=5)) + + # note: it is also possible to query from a query_vector directly: + # query_vector: list[float] = [...] + # retriever.search(query_vector=query_vector, top_k=5) diff --git a/examples/retrieve/similarity_search_for_text.py b/examples/retrieve/similarity_search_for_text.py new file mode 100644 index 00000000..32ddda5d --- /dev/null +++ b/examples/retrieve/similarity_search_for_text.py @@ -0,0 +1,38 @@ +"""This example uses an example Movie database where movies' plots are embedded +using OpenAI embeddings. OPENAI_API_KEY needs to be set in the environment for +this example to run. + +It shows how to use a vector-only retriever to find context +similar to a query **text** using vector similarity. 
+""" + +import neo4j +from neo4j_graphrag.embeddings.openai import OpenAIEmbeddings +from neo4j_graphrag.retrievers import VectorRetriever + +# Define database credentials +URI = "neo4j+s://demo.neo4jlabs.com" +AUTH = ("recommendations", "recommendations") +DATABASE = "recommendations" +INDEX_NAME = "moviePlotsEmbedding" + + +with neo4j.GraphDatabase.driver(URI, auth=AUTH, database=DATABASE) as driver: + # Initialize the retriever + retriever = VectorRetriever( + driver=driver, + index_name=INDEX_NAME, + embedder=OpenAIEmbeddings(), + # optionally, provide a list of properties to fetch (default fetch all) + # return_properties=[], + # optionally, configure how to format the results + # (see corresponding example in 'customize' directory) + # result_formatter=None, + # optionally, set neo4j database + # neo4j_database="neo4j", + ) + + # Perform the similarity search for a text query + # (retrieve the top 5 most similar nodes) + query_text = "Find me a movie about aliens" + print(retriever.search(query_text=query_text, top_k=5)) diff --git a/examples/retrieve/similarity_search_for_vector.py b/examples/retrieve/similarity_search_for_vector.py new file mode 100644 index 00000000..43b38a0e --- /dev/null +++ b/examples/retrieve/similarity_search_for_vector.py @@ -0,0 +1,29 @@ +"""This example uses an example Movie database where movies' plots are embedded +using OpenAI embeddings. OPENAI_API_KEY needs to be set in the environment for +this example to run. + +It shows how to use a vector-only retriever to find context +similar to a query **vector** using vector similarity. +""" + +import neo4j +from embedding_avatar import EMBEDDINGS_AVATAR +from neo4j_graphrag.retrievers import VectorRetriever + +# Define database credentials +URI = "neo4j+s://demo.neo4jlabs.com" +AUTH = ("recommendations", "recommendations") +DATABASE = "recommendations" +INDEX_NAME = "moviePlotsEmbedding" + + +with neo4j.GraphDatabase.driver(URI, auth=AUTH, database=DATABASE) as driver: + # Initialize the retriever + retriever = VectorRetriever( + driver=driver, + index_name=INDEX_NAME, + ) + + # Perform the similarity search for a vector query + query_vector: list[float] = EMBEDDINGS_AVATAR + print(retriever.search(query_vector=query_vector, top_k=5)) diff --git a/examples/retrieve/text2cypher_search.py b/examples/retrieve/text2cypher_search.py new file mode 100644 index 00000000..16326ba1 --- /dev/null +++ b/examples/retrieve/text2cypher_search.py @@ -0,0 +1,54 @@ +"""The example leverages the Text2CypherRetriever to fetch some context. +It uses the OpenAILLM, hence the OPENAI_API_KEY needs to be set in the +environment for this example to run. 
+""" + +import neo4j +from neo4j_graphrag.llm import OpenAILLM +from neo4j_graphrag.retrievers import Text2CypherRetriever + +# Define database credentials +URI = "neo4j+s://demo.neo4jlabs.com" +AUTH = ("recommendations", "recommendations") +DATABASE = "recommendations" + +# Create LLM object +llm = OpenAILLM(model_name="gpt-4o", model_params={"temperature": 0}) + +# (Optional) Specify your own Neo4j schema +neo4j_schema = """ +Node properties: +Person {name: STRING, born: INTEGER} +Movie {tagline: STRING, title: STRING, released: INTEGER} +Relationship properties: +ACTED_IN {roles: LIST} +REVIEWED {summary: STRING, rating: INTEGER} +The relationships: +(:Person)-[:ACTED_IN]->(:Movie) +(:Person)-[:DIRECTED]->(:Movie) +(:Person)-[:PRODUCED]->(:Movie) +(:Person)-[:WROTE]->(:Movie) +(:Person)-[:FOLLOWS]->(:Person) +(:Person)-[:REVIEWED]->(:Movie) +""" + +# (Optional) Provide user input/query pairs for the LLM to use as examples +examples = [ + "USER INPUT: 'Which actors starred in the Matrix?' QUERY: MATCH (p:Person)-[:ACTED_IN]->(m:Movie) WHERE m.title = 'The Matrix' RETURN p.name" +] + +with neo4j.GraphDatabase.driver(URI, auth=AUTH) as driver: + # Initialize the retriever + retriever = Text2CypherRetriever( + driver=driver, + llm=llm, + neo4j_schema=neo4j_schema, + examples=examples, + # optionally, you can also provide your own prompt + # for the text2Cypher generation step + # custom_prompt="", + ) + + # Generate a Cypher query using the LLM, send it to the Neo4j database, and return the results + query_text = "Which movies did Hugo Weaving star in?" + print(retriever.search(query_text=query_text)) diff --git a/examples/retrieve/vector_cypher_retriever.py b/examples/retrieve/vector_cypher_retriever.py new file mode 100644 index 00000000..d4b98334 --- /dev/null +++ b/examples/retrieve/vector_cypher_retriever.py @@ -0,0 +1,47 @@ +"""This example uses an example Movie database where movies' plots are embedded +using OpenAI embeddings. OPENAI_API_KEY needs to be set in the environment for +this example to run. + +Also requires minimal Cypher knowledge to write the retrieval query. + +It shows how to use a vector-cypher retriever to find context +similar to a query **text** using vector similarity + graph traversal. +""" + +import neo4j +from neo4j_graphrag.embeddings.openai import OpenAIEmbeddings +from neo4j_graphrag.retrievers import VectorCypherRetriever + +# Define database credentials +URI = "neo4j+s://demo.neo4jlabs.com" +AUTH = ("recommendations", "recommendations") +DATABASE = "recommendations" +INDEX_NAME = "moviePlotsEmbedding" + +# for each Movie node matched by the vector search, retrieve more context: +# the name of all actors starring in that movie +RETRIEVAL_QUERY = " MATCH (node)<-[:ACTED_IN]-(p:Person) RETURN node.title as movieTitle, node.plot as moviePlot, collect(p.name) as actors, score as similarityScore" + +with neo4j.GraphDatabase.driver(URI, auth=AUTH, database=DATABASE) as driver: + # Initialize the retriever + retriever = VectorCypherRetriever( + driver=driver, + index_name=INDEX_NAME, + # note: embedder is optional if you only use query_vector + embedder=OpenAIEmbeddings(), + retrieval_query=RETRIEVAL_QUERY, + # optionally, configure how to format the results + # (see corresponding example in 'customize' directory) + # result_formatter=None, + # optionally, set neo4j database + # neo4j_database="neo4j", + ) + + # Perform the similarity search for a text query + # (retrieve the top 5 most similar nodes) + query_text = "Who were the actors in Avatar?" 
+ print(retriever.search(query_text=query_text, top_k=5)) + + # note: it is also possible to query from a query_vector directly: + # query_vector: list[float] = [...] + # retriever.search(query_vector=query_vector, top_k=5) diff --git a/examples/similarity_search_for_text.py b/examples/similarity_search_for_text.py deleted file mode 100644 index a50eca48..00000000 --- a/examples/similarity_search_for_text.py +++ /dev/null @@ -1,57 +0,0 @@ -from __future__ import annotations - -from random import random - -from neo4j import GraphDatabase -from neo4j_graphrag.embeddings.base import Embedder -from neo4j_graphrag.indexes import create_vector_index -from neo4j_graphrag.retrievers import VectorRetriever - -URI = "neo4j://localhost:7687" -AUTH = ("neo4j", "password") - -INDEX_NAME = "embedding-name" -DIMENSION = 1536 - -# Connect to Neo4j database -driver = GraphDatabase.driver(URI, auth=AUTH) - - -# Create CustomEmbedder object with the required Embedder type -class CustomEmbedder(Embedder): - def embed_query(self, text: str) -> list[float]: - return [random() for _ in range(DIMENSION)] - - -embedder = CustomEmbedder() - -# Creating the index -create_vector_index( - driver, - INDEX_NAME, - label="Document", - embedding_property="vectorProperty", - dimensions=DIMENSION, - similarity_fn="euclidean", -) - -# Initialize the retriever -retriever = VectorRetriever(driver, INDEX_NAME, embedder) - -# Upsert the query -vector = [random() for _ in range(DIMENSION)] -insert_query = ( - "MERGE (n:Document {id: $id})" - "WITH n " - "CALL db.create.setNodeVectorProperty(n, 'vectorProperty', $vector)" - "RETURN n" -) -parameters = { - "id": 0, - "vector": vector, -} -driver.execute_query(insert_query, parameters) - -# Perform the similarity search for a text query -query_text = "Find me a book about Fremen" -print(retriever.search(query_text=query_text, top_k=5)) diff --git a/examples/similarity_search_for_text_mistral.py b/examples/similarity_search_for_text_mistral.py deleted file mode 100644 index db95b890..00000000 --- a/examples/similarity_search_for_text_mistral.py +++ /dev/null @@ -1,50 +0,0 @@ -from __future__ import annotations - -from random import random - -from neo4j import GraphDatabase -from neo4j_graphrag.embeddings.mistral import MistralAIEmbeddings -from neo4j_graphrag.indexes import create_vector_index -from neo4j_graphrag.retrievers import VectorRetriever - -URI = "neo4j://localhost:7687" -AUTH = ("neo4j", "password") - -INDEX_NAME = "embedding-name" -DIMENSION = 1024 - -# Connect to Neo4j database -driver = GraphDatabase.driver(URI, auth=AUTH) - -embedder = MistralAIEmbeddings() - -# Creating the index -create_vector_index( - driver, - INDEX_NAME, - label="Document", - embedding_property="vectorProperty", - dimensions=DIMENSION, - similarity_fn="euclidean", -) - -# Initialize the retriever -retriever = VectorRetriever(driver, INDEX_NAME, embedder) - -# Upsert the query -vector = [random() for _ in range(DIMENSION)] -insert_query = ( - "MERGE (n:Document {id: $id})" - "WITH n " - "CALL db.create.setNodeVectorProperty(n, 'vectorProperty', $vector)" - "RETURN n" -) -parameters = { - "id": 0, - "vector": vector, -} -driver.execute_query(insert_query, parameters) - -# Perform the similarity search for a text query -query_text = "Find me a book about Fremen" -print(retriever.search(query_text=query_text, top_k=5)) diff --git a/examples/similarity_search_for_vector.py b/examples/similarity_search_for_vector.py deleted file mode 100644 index 740cf579..00000000 --- 
a/examples/similarity_search_for_vector.py +++ /dev/null @@ -1,45 +0,0 @@ -from random import random - -from neo4j import GraphDatabase -from neo4j_graphrag.indexes import create_vector_index -from neo4j_graphrag.retrievers import VectorRetriever - -URI = "neo4j://localhost:7687" -AUTH = ("neo4j", "password") - -INDEX_NAME = "embedding-name" -DIMENSION = 1536 - -# Connect to Neo4j database -driver = GraphDatabase.driver(URI, auth=AUTH) - -# Creating the index -create_vector_index( - driver, - INDEX_NAME, - label="Document", - embedding_property="vectorProperty", - dimensions=DIMENSION, - similarity_fn="euclidean", -) - -# Initialize the retriever -retriever = VectorRetriever(driver, INDEX_NAME) - -# Upsert the vector -vector = [random() for _ in range(DIMENSION)] -insert_query = ( - "MERGE (n:Document {id: $id})" - "WITH n " - "CALL db.create.setNodeVectorProperty(n, 'vectorProperty', $vector)" - "RETURN n" -) -parameters = { - "id": 0, - "vector": vector, -} -driver.execute_query(insert_query, parameters) - -# Perform the similarity search for a vector query -query_vector = [random() for _ in range(DIMENSION)] -print(retriever.search(query_vector=query_vector, top_k=5)) diff --git a/examples/text2cypher_search.py b/examples/text2cypher_search.py deleted file mode 100644 index 287024f5..00000000 --- a/examples/text2cypher_search.py +++ /dev/null @@ -1,46 +0,0 @@ -from neo4j import GraphDatabase -from neo4j_graphrag.llm import OpenAILLM -from neo4j_graphrag.retrievers import Text2CypherRetriever - -URI = "neo4j://localhost:7687" -AUTH = ("neo4j", "password") - -# Connect to Neo4j database -driver = GraphDatabase.driver(URI, auth=AUTH) - -# Create LLM object -llm = OpenAILLM(model_name="gpt-3.5-turbo", model_params={"temperature": 0}) - -# (Optional) Specify your own Neo4j schema -neo4j_schema = """ -Node properties: -Person {name: STRING, born: INTEGER} -Movie {tagline: STRING, title: STRING, released: INTEGER} -Relationship properties: -ACTED_IN {roles: LIST} -REVIEWED {summary: STRING, rating: INTEGER} -The relationships: -(:Person)-[:ACTED_IN]->(:Movie) -(:Person)-[:DIRECTED]->(:Movie) -(:Person)-[:PRODUCED]->(:Movie) -(:Person)-[:WROTE]->(:Movie) -(:Person)-[:FOLLOWS]->(:Person) -(:Person)-[:REVIEWED]->(:Movie) -""" - -# (Optional) Provide user input/query pairs for the LLM to use as examples -examples = [ - "USER INPUT: 'Which actors starred in the Matrix?' QUERY: MATCH (p:Person)-[:ACTED_IN]->(m:Movie) WHERE m.title = 'The Matrix' RETURN p.name" -] - -# Initialize the retriever -retriever = Text2CypherRetriever( - driver=driver, - llm=llm, - neo4j_schema=neo4j_schema, - examples=examples, -) - -# Generate a Cypher query using the LLM, send it to the Neo4j database, and return the results -query_text = "Which movies did Hugo Weaving star in?" 
-print(retriever.search(query_text=query_text)) diff --git a/examples/vector_cypher_retrieval.py b/examples/vector_cypher_retrieval.py deleted file mode 100644 index 59218ac5..00000000 --- a/examples/vector_cypher_retrieval.py +++ /dev/null @@ -1,68 +0,0 @@ -from __future__ import annotations - -import random -import string - -from neo4j import GraphDatabase -from neo4j_graphrag.embeddings.base import Embedder -from neo4j_graphrag.indexes import create_vector_index -from neo4j_graphrag.retrievers import VectorCypherRetriever - -URI = "neo4j://localhost:7687" -AUTH = ("neo4j", "password") - -INDEX_NAME = "embedding-name" -DIMENSION = 1536 - -# Connect to Neo4j database -driver = GraphDatabase.driver(URI, auth=AUTH) - - -# Create Embedder object -class CustomEmbedder(Embedder): - def embed_query(self, text: str) -> list[float]: - return [random.random() for _ in range(DIMENSION)] - - -# Generate random strings -def random_str(n: int) -> str: - return "".join([random.choice(string.ascii_letters) for _ in range(n)]) - - -embedder = CustomEmbedder() - -# Creating the index -create_vector_index( - driver, - INDEX_NAME, - label="Document", - embedding_property="vectorProperty", - dimensions=DIMENSION, - similarity_fn="euclidean", -) - -# Initialize the retriever -retrieval_query = "MATCH (node)-[:AUTHORED_BY]->(author:Author)" "RETURN author.name" -retriever = VectorCypherRetriever(driver, INDEX_NAME, retrieval_query, embedder) - -# Upsert the query -vector = [random.random() for _ in range(DIMENSION)] -insert_query = ( - "MERGE (doc:Document {id: $id})" - "WITH doc " - "CALL db.create.setNodeVectorProperty(doc, 'vectorProperty', $vector)" - "WITH doc " - "MERGE (author:Author {name: $authorName})" - "MERGE (doc)-[:AUTHORED_BY]->(author)" - "RETURN doc, author" -) -parameters = { - "id": random.randint(0, 10000), - "vector": vector, - "authorName": random_str(10), -} -driver.execute_query(insert_query, parameters) - -# Perform the search -query_text = "Find me a book about Fremen" -print(retriever.search(query_text=query_text, top_k=1)) diff --git a/examples/vector_search_with_filters.py b/examples/vector_search_with_filters.py deleted file mode 100644 index d2ee81c0..00000000 --- a/examples/vector_search_with_filters.py +++ /dev/null @@ -1,73 +0,0 @@ -from __future__ import annotations - -import random -import string - -from neo4j import GraphDatabase -from neo4j_graphrag.embeddings.base import Embedder -from neo4j_graphrag.indexes import create_vector_index -from neo4j_graphrag.retrievers import VectorRetriever - -URI = "neo4j://localhost:7687" -AUTH = ("neo4j", "password") - -INDEX_NAME = "embedding-name" -DIMENSION = 1536 - -# Connect to Neo4j database -driver = GraphDatabase.driver(URI, auth=AUTH) - - -# Create Embedder object -class CustomEmbedder(Embedder): - def embed_query(self, text: str) -> list[float]: - return [random.random() for _ in range(DIMENSION)] - - -# Generate random strings -def random_str(n: int) -> str: - return "".join([random.choice(string.ascii_letters) for _ in range(n)]) - - -embedder = CustomEmbedder() - -# Creating the index -create_vector_index( - driver, - INDEX_NAME, - label="Document", - embedding_property="vectorProperty", - dimensions=DIMENSION, - similarity_fn="euclidean", -) - -# Initialize the retriever -retriever = VectorRetriever(driver, INDEX_NAME, embedder) - -# Upsert the query -vector = [random.random() for _ in range(DIMENSION)] -insert_query = ( - "MERGE (doc:Document {id: $id})" - "ON CREATE SET doc.int_property = $id, " - " 
doc.short_text_property = toString($id)" - "WITH doc " - "CALL db.create.setNodeVectorProperty(doc, 'vectorProperty', $vector)" - "WITH doc " - "MERGE (author:Author {name: $authorName})" - "MERGE (doc)-[:AUTHORED_BY]->(author)" - "RETURN doc, author" -) -parameters = { - "id": random.randint(0, 10000), - "vector": vector, - "authorName": random_str(10), -} -driver.execute_query(insert_query, parameters) - -# Perform the search -query_text = "Find me a book about Fremen" -print( - retriever.search( - query_text=query_text, top_k=1, filters={"int_property": {"$gt": 100}} - ) -) diff --git a/examples/weaviate/__init__.py b/examples/weaviate/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/poetry.lock b/poetry.lock index c1aa00c4..eacf1e4a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "aiohappyeyeballs" @@ -301,17 +301,17 @@ lxml = ["lxml"] [[package]] name = "boto3" -version = "1.35.41" +version = "1.35.43" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.35.41-py3-none-any.whl", hash = "sha256:2bf7e7f376aee52155fc4ae4487f29333a6bcdf3a05c3bc4fede10b972d951a6"}, - {file = "boto3-1.35.41.tar.gz", hash = "sha256:e74bc6d69c04ca611b7f58afe08e2ded6cb6504a4a80557b656abeefee395f88"}, + {file = "boto3-1.35.43-py3-none-any.whl", hash = "sha256:e6a50a0599f75b21de0de1a551a0564793d25b304fa623e4052e527b268de734"}, + {file = "boto3-1.35.43.tar.gz", hash = "sha256:0197f460632804577aa78b2f6daf7b823bffa9d4d67a5cebb179efff0fe9631b"}, ] [package.dependencies] -botocore = ">=1.35.41,<1.36.0" +botocore = ">=1.35.43,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -320,13 +320,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.35.41" +version = "1.35.43" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.35.41-py3-none-any.whl", hash = "sha256:915c4d81e3a0be3b793c1e2efdf19af1d0a9cd4a2d8de08ee18216c14d67764b"}, - {file = "botocore-1.35.41.tar.gz", hash = "sha256:8a09a32136df8768190a6c92f0240cd59c30deb99c89026563efadbbed41fa00"}, + {file = "botocore-1.35.43-py3-none-any.whl", hash = "sha256:7cfdee9117617da97daaf259dd8484bcdc259c59eb7d1ce7db9ecf8506b7d36c"}, + {file = "botocore-1.35.43.tar.gz", hash = "sha256:04539b85ade060601a3023cacb538fc17aad8c059a5a2e18fe4bc5d0d91fbd72"}, ] [package.dependencies] @@ -1698,13 +1698,13 @@ files = [ [[package]] name = "huggingface-hub" -version = "0.25.2" +version = "0.26.0" description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" optional = false python-versions = ">=3.8.0" files = [ - {file = "huggingface_hub-0.25.2-py3-none-any.whl", hash = "sha256:1897caf88ce7f97fe0110603d8f66ac264e3ba6accdf30cd66cc0fed5282ad25"}, - {file = "huggingface_hub-0.25.2.tar.gz", hash = "sha256:a1014ea111a5f40ccd23f7f7ba8ac46e20fa3b658ced1f86a00c75c06ec6423c"}, + {file = "huggingface_hub-0.26.0-py3-none-any.whl", hash = "sha256:e43b8f36042b2103b48dea822535e08f5f089c4aa7013a067fca7b4ebf7f85a3"}, + {file = "huggingface_hub-0.26.0.tar.gz", hash = "sha256:524fe9281b015b76aa73ff1a83bf1cbe8cab851c9ac5ae5fcd2a25d5173ce629"}, ] [package.dependencies] @@ -1717,16 +1717,16 @@ tqdm = ">=4.42.1" typing-extensions = ">=3.7.4.3" [package.extras] -all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio (>=4.0.0)", "jedi", "libcst (==1.4.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] cli = ["InquirerPy (==0.3.4)"] -dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio (>=4.0.0)", "jedi", "libcst (==1.4.0)", "mypy (==1.5.1)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.5.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] hf-transfer = ["hf-transfer (>=0.1.4)"] -inference = ["aiohttp", "minijinja (>=1.0)"] -quality = ["mypy 
(==1.5.1)", "ruff (>=0.5.0)"] +inference = ["aiohttp"] +quality = ["libcst (==1.4.0)", "mypy (==1.5.1)", "ruff (>=0.5.0)"] tensorflow = ["graphviz", "pydot", "tensorflow"] tensorflow-testing = ["keras (<3.0)", "tensorflow"] -testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio (>=4.0.0)", "jedi", "numpy", "pytest (>=8.1.1,<8.2.2)", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-mock", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] torch = ["safetensors[torch]", "torch"] typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] @@ -1987,13 +1987,13 @@ files = [ [[package]] name = "langchain-core" -version = "0.3.10" +version = "0.3.12" description = "Building applications with LLMs through composability" optional = false python-versions = "<4.0,>=3.9" files = [ - {file = "langchain_core-0.3.10-py3-none-any.whl", hash = "sha256:146be6bf2d3dc0d6f4feb46ef082182cf57b056e8163d45278529cd7b7343d2f"}, - {file = "langchain_core-0.3.10.tar.gz", hash = "sha256:63b9a3d03b52dba29cc248b752c574cdcb5fb04bd0fc5c76097fcbb7aaba5221"}, + {file = "langchain_core-0.3.12-py3-none-any.whl", hash = "sha256:46050d34f5fa36dc57dca971c6a26f505643dd05ee0492c7ac286d0a78a82037"}, + {file = "langchain_core-0.3.12.tar.gz", hash = "sha256:98a3c078e375786aa84939bfd1111263af2f3bc402bbe2cac9fa18a387459cf2"}, ] [package.dependencies] @@ -2005,9 +2005,25 @@ pydantic = [ {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, ] PyYAML = ">=5.3" -tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<9.0.0" +tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10.0.0" typing-extensions = ">=4.7" +[[package]] +name = "langchain-openai" +version = "0.2.2" +description = "An integration package connecting OpenAI and LangChain" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "langchain_openai-0.2.2-py3-none-any.whl", hash = "sha256:3a203228cb38e4711ebd8c0a3bd51854e447f1d017e8475b6467b07ce7dd3e88"}, + {file = "langchain_openai-0.2.2.tar.gz", hash = "sha256:9ae8e2ec7d1ca84fd3bfa82186724528d68e1510a1dc9cdf617a7c669b7a7768"}, +] + +[package.dependencies] +langchain-core = ">=0.3.9,<0.4.0" +openai = ">=1.40.0,<2.0.0" +tiktoken = ">=0.7,<1" + [[package]] name = "langchain-text-splitters" version = "0.3.0" @@ -2024,13 +2040,13 @@ langchain-core = ">=0.3.0,<0.4.0" [[package]] name = "langsmith" -version = "0.1.135" +version = "0.1.136" description = "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform." 
optional = false python-versions = "<4.0,>=3.8.1" files = [ - {file = "langsmith-0.1.135-py3-none-any.whl", hash = "sha256:b1d1ca3bad483a4239745c57e9b9157b4d099fbf3149be21e3d112c94ede06ac"}, - {file = "langsmith-0.1.135.tar.gz", hash = "sha256:7abed7e141386af99a2177f0b3600b124ae3ad1b482879ba0724ce92ef998a11"}, + {file = "langsmith-0.1.136-py3-none-any.whl", hash = "sha256:cad2215eb7a754ee259878e19c558f4f8d3795aa1b699f087d4500e640f80d0a"}, + {file = "langsmith-0.1.136.tar.gz", hash = "sha256:5c0de01a313db70dd9a85845c0f416a69b5b653b3e98ba413d7d41e8851315b1"}, ] [package.dependencies] @@ -2401,22 +2417,22 @@ files = [ [[package]] name = "marshmallow" -version = "3.22.0" +version = "3.23.0" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "marshmallow-3.22.0-py3-none-any.whl", hash = "sha256:71a2dce49ef901c3f97ed296ae5051135fd3febd2bf43afe0ae9a82143a494d9"}, - {file = "marshmallow-3.22.0.tar.gz", hash = "sha256:4972f529104a220bb8637d595aa4c9762afbe7f7a77d82dc58c1615d70c5823e"}, + {file = "marshmallow-3.23.0-py3-none-any.whl", hash = "sha256:82f20a2397834fe6d9611b241f2f7e7b680ed89c49f84728a1ad937be6b4bdf4"}, + {file = "marshmallow-3.23.0.tar.gz", hash = "sha256:98d8827a9f10c03d44ead298d2e99c6aea8197df18ccfad360dae7f89a50da2e"}, ] [package.dependencies] packaging = ">=17.0" [package.extras] -dev = ["marshmallow[tests]", "pre-commit (>=3.5,<4.0)", "tox"] -docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.0.2)", "sphinx-issues (==4.1.0)", "sphinx-version-warning (==1.1.2)"] -tests = ["pytest", "pytz", "simplejson"] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] +docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.13)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] +tests = ["pytest", "simplejson"] [[package]] name = "mccabe" @@ -2766,46 +2782,50 @@ files = [ [[package]] name = "nvidia-cublas-cu12" -version = "12.1.3.1" +version = "12.4.5.8" description = "CUBLAS native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-manylinux1_x86_64.whl", hash = "sha256:ee53ccca76a6fc08fb9701aa95b6ceb242cdaab118c3bb152af4e579af792728"}, - {file = "nvidia_cublas_cu12-12.1.3.1-py3-none-win_amd64.whl", hash = "sha256:2b964d60e8cf11b5e1073d179d85fa340c120e99b3067558f3cf98dd69d02906"}, + {file = "nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0f8aa1706812e00b9f19dfe0cdb3999b092ccb8ca168c0db5b8ea712456fd9b3"}, + {file = "nvidia_cublas_cu12-12.4.5.8-py3-none-manylinux2014_x86_64.whl", hash = "sha256:2fc8da60df463fdefa81e323eef2e36489e1c94335b5358bcb38360adf75ac9b"}, + {file = "nvidia_cublas_cu12-12.4.5.8-py3-none-win_amd64.whl", hash = "sha256:5a796786da89203a0657eda402bcdcec6180254a8ac22d72213abc42069522dc"}, ] [[package]] name = "nvidia-cuda-cupti-cu12" -version = "12.1.105" +version = "12.4.127" description = "CUDA profiling tools runtime libs." 
optional = false python-versions = ">=3" files = [ - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:e54fde3983165c624cb79254ae9818a456eb6e87a7fd4d56a2352c24ee542d7e"}, - {file = "nvidia_cuda_cupti_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:bea8236d13a0ac7190bd2919c3e8e6ce1e402104276e6f9694479e48bb0eb2a4"}, + {file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:79279b35cf6f91da114182a5ce1864997fd52294a87a16179ce275773799458a"}, + {file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:9dec60f5ac126f7bb551c055072b69d85392b13311fcc1bcda2202d172df30fb"}, + {file = "nvidia_cuda_cupti_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:5688d203301ab051449a2b1cb6690fbe90d2b372f411521c86018b950f3d7922"}, ] [[package]] name = "nvidia-cuda-nvrtc-cu12" -version = "12.1.105" +version = "12.4.127" description = "NVRTC native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:339b385f50c309763ca65456ec75e17bbefcbbf2893f462cb8b90584cd27a1c2"}, - {file = "nvidia_cuda_nvrtc_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:0a98a522d9ff138b96c010a65e145dc1b4850e9ecb75a0172371793752fd46ed"}, + {file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0eedf14185e04b76aa05b1fea04133e59f465b6f960c0cbf4e37c3cb6b0ea198"}, + {file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a178759ebb095827bd30ef56598ec182b85547f1508941a3d560eb7ea1fbf338"}, + {file = "nvidia_cuda_nvrtc_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:a961b2f1d5f17b14867c619ceb99ef6fcec12e46612711bcec78eb05068a60ec"}, ] [[package]] name = "nvidia-cuda-runtime-cu12" -version = "12.1.105" +version = "12.4.127" description = "CUDA Runtime native Libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:6e258468ddf5796e25f1dc591a31029fa317d97a0a94ed93468fc86301d61e40"}, - {file = "nvidia_cuda_runtime_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:dfb46ef84d73fababab44cf03e3b83f80700d27ca300e537f85f636fac474344"}, + {file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:961fe0e2e716a2a1d967aab7caee97512f71767f852f67432d572e36cb3a11f3"}, + {file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:64403288fa2136ee8e467cdc9c9427e0434110899d07c779f25b5c068934faa5"}, + {file = "nvidia_cuda_runtime_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:09c2e35f48359752dfa822c09918211844a3d93c100a715d79b59591130c5e1e"}, ] [[package]] @@ -2824,35 +2844,41 @@ nvidia-cublas-cu12 = "*" [[package]] name = "nvidia-cufft-cu12" -version = "11.0.2.54" +version = "11.2.1.3" description = "CUFFT native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-manylinux1_x86_64.whl", hash = "sha256:794e3948a1aa71fd817c3775866943936774d1c14e7628c74f6f7417224cdf56"}, - {file = "nvidia_cufft_cu12-11.0.2.54-py3-none-win_amd64.whl", hash = "sha256:d9ac353f78ff89951da4af698f80870b1534ed69993f10a4cf1d96f21357e253"}, + {file = "nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_aarch64.whl", hash = "sha256:5dad8008fc7f92f5ddfa2101430917ce2ffacd86824914c82e28990ad7f00399"}, + {file = 
"nvidia_cufft_cu12-11.2.1.3-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f083fc24912aa410be21fa16d157fed2055dab1cc4b6934a0e03cba69eb242b9"}, + {file = "nvidia_cufft_cu12-11.2.1.3-py3-none-win_amd64.whl", hash = "sha256:d802f4954291101186078ccbe22fc285a902136f974d369540fd4a5333d1440b"}, ] +[package.dependencies] +nvidia-nvjitlink-cu12 = "*" + [[package]] name = "nvidia-curand-cu12" -version = "10.3.2.106" +version = "10.3.5.147" description = "CURAND native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:9d264c5036dde4e64f1de8c50ae753237c12e0b1348738169cd0f8a536c0e1e0"}, - {file = "nvidia_curand_cu12-10.3.2.106-py3-none-win_amd64.whl", hash = "sha256:75b6b0c574c0037839121317e17fd01f8a69fd2ef8e25853d826fec30bdba74a"}, + {file = "nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1f173f09e3e3c76ab084aba0de819c49e56614feae5c12f69883f4ae9bb5fad9"}, + {file = "nvidia_curand_cu12-10.3.5.147-py3-none-manylinux2014_x86_64.whl", hash = "sha256:a88f583d4e0bb643c49743469964103aa59f7f708d862c3ddb0fc07f851e3b8b"}, + {file = "nvidia_curand_cu12-10.3.5.147-py3-none-win_amd64.whl", hash = "sha256:f307cc191f96efe9e8f05a87096abc20d08845a841889ef78cb06924437f6771"}, ] [[package]] name = "nvidia-cusolver-cu12" -version = "11.4.5.107" +version = "11.6.1.9" description = "CUDA solver native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-manylinux1_x86_64.whl", hash = "sha256:8a7ec542f0412294b15072fa7dab71d31334014a69f953004ea7a118206fe0dd"}, - {file = "nvidia_cusolver_cu12-11.4.5.107-py3-none-win_amd64.whl", hash = "sha256:74e0c3a24c78612192a74fcd90dd117f1cf21dea4822e66d89e8ea80e3cd2da5"}, + {file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_aarch64.whl", hash = "sha256:d338f155f174f90724bbde3758b7ac375a70ce8e706d70b018dd3375545fc84e"}, + {file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-manylinux2014_x86_64.whl", hash = "sha256:19e33fa442bcfd085b3086c4ebf7e8debc07cfe01e11513cc6d332fd918ac260"}, + {file = "nvidia_cusolver_cu12-11.6.1.9-py3-none-win_amd64.whl", hash = "sha256:e77314c9d7b694fcebc84f58989f3aa4fb4cb442f12ca1a9bde50f5e8f6d1b9c"}, ] [package.dependencies] @@ -2862,13 +2888,14 @@ nvidia-nvjitlink-cu12 = "*" [[package]] name = "nvidia-cusparse-cu12" -version = "12.1.0.106" +version = "12.3.1.170" description = "CUSPARSE native runtime libraries" optional = false python-versions = ">=3" files = [ - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-manylinux1_x86_64.whl", hash = "sha256:f3b50f42cf363f86ab21f720998517a659a48131e8d538dc02f8768237bd884c"}, - {file = "nvidia_cusparse_cu12-12.1.0.106-py3-none-win_amd64.whl", hash = "sha256:b798237e81b9719373e8fae8d4f091b70a0cf09d9d85c95a557e11df2d8e9a5a"}, + {file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_aarch64.whl", hash = "sha256:9d32f62896231ebe0480efd8a7f702e143c98cfaa0e8a76df3386c1ba2b54df3"}, + {file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ea4f11a2904e2a8dc4b1833cc1b5181cde564edd0d5cd33e3c168eff2d1863f1"}, + {file = "nvidia_cusparse_cu12-12.3.1.170-py3-none-win_amd64.whl", hash = "sha256:9bc90fb087bc7b4c15641521f31c0371e9a612fc2ba12c338d3ae032e6b6797f"}, ] [package.dependencies] @@ -2876,47 +2903,47 @@ nvidia-nvjitlink-cu12 = "*" [[package]] name = "nvidia-nccl-cu12" -version = "2.20.5" +version = "2.21.5" description = "NVIDIA Collective Communication Library 
(NCCL) Runtime" optional = false python-versions = ">=3" files = [ - {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1fc150d5c3250b170b29410ba682384b14581db722b2531b0d8d33c595f33d01"}, - {file = "nvidia_nccl_cu12-2.20.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:057f6bf9685f75215d0c53bf3ac4a10b3e6578351de307abad9e18a99182af56"}, + {file = "nvidia_nccl_cu12-2.21.5-py3-none-manylinux2014_x86_64.whl", hash = "sha256:8579076d30a8c24988834445f8d633c697d42397e92ffc3f63fa26766d25e0a0"}, ] [[package]] name = "nvidia-nvjitlink-cu12" -version = "12.6.77" +version = "12.4.127" description = "Nvidia JIT LTO Library" optional = false python-versions = ">=3" files = [ - {file = "nvidia_nvjitlink_cu12-12.6.77-py3-none-manylinux2014_aarch64.whl", hash = "sha256:3bf10d85bb1801e9c894c6e197e44dd137d2a0a9e43f8450e9ad13f2df0dd52d"}, - {file = "nvidia_nvjitlink_cu12-12.6.77-py3-none-manylinux2014_x86_64.whl", hash = "sha256:9ae346d16203ae4ea513be416495167a0101d33d2d14935aa9c1829a3fb45142"}, - {file = "nvidia_nvjitlink_cu12-12.6.77-py3-none-win_amd64.whl", hash = "sha256:410718cd44962bed862a31dd0318620f6f9a8b28a6291967bcfcb446a6516771"}, + {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:4abe7fef64914ccfa909bc2ba39739670ecc9e820c83ccc7a6ed414122599b83"}, + {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:06b3b9b25bf3f8af351d664978ca26a16d2c5127dbd53c0497e28d1fb9611d57"}, + {file = "nvidia_nvjitlink_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:fd9020c501d27d135f983c6d3e244b197a7ccad769e34df53a42e276b0e25fa1"}, ] [[package]] name = "nvidia-nvtx-cu12" -version = "12.1.105" +version = "12.4.127" description = "NVIDIA Tools Extension" optional = false python-versions = ">=3" files = [ - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-manylinux1_x86_64.whl", hash = "sha256:dc21cf308ca5691e7c04d962e213f8a4aa9bbfa23d95412f452254c2caeb09e5"}, - {file = "nvidia_nvtx_cu12-12.1.105-py3-none-win_amd64.whl", hash = "sha256:65f4d98982b31b60026e0e6de73fbdfc09d08a96f4656dd3665ca616a11e1e82"}, + {file = "nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7959ad635db13edf4fc65c06a6e9f9e55fc2f92596db928d169c0bb031e88ef3"}, + {file = "nvidia_nvtx_cu12-12.4.127-py3-none-manylinux2014_x86_64.whl", hash = "sha256:781e950d9b9f60d8241ccea575b32f5105a5baf4c2351cab5256a24869f12a1a"}, + {file = "nvidia_nvtx_cu12-12.4.127-py3-none-win_amd64.whl", hash = "sha256:641dccaaa1139f3ffb0d3164b4b84f9d253397e38246a4f2f36728b48566d485"}, ] [[package]] name = "openai" -version = "1.51.2" +version = "1.52.0" description = "The official Python library for the openai API" optional = false python-versions = ">=3.7.1" files = [ - {file = "openai-1.51.2-py3-none-any.whl", hash = "sha256:5c5954711cba931423e471c37ff22ae0fd3892be9b083eee36459865fbbb83fa"}, - {file = "openai-1.51.2.tar.gz", hash = "sha256:c6a51fac62a1ca9df85a522e462918f6bb6bc51a8897032217e453a0730123a6"}, + {file = "openai-1.52.0-py3-none-any.whl", hash = "sha256:0c249f20920183b0a2ca4f7dba7b0452df3ecd0fa7985eb1d91ad884bc3ced9c"}, + {file = "openai-1.52.0.tar.gz", hash = "sha256:95c65a5f77559641ab8f3e4c3a050804f7b51d278870e2ec1f7444080bfe565a"}, ] [package.dependencies] @@ -3455,6 +3482,7 @@ description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs optional = false python-versions = ">=3.8" files = [ + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = 
"sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] @@ -3465,6 +3493,7 @@ description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" files = [ + {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, ] @@ -4257,11 +4286,6 @@ files = [ {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f60021ec1574e56632be2a36b946f8143bf4e5e6af4a06d85281adc22938e0dd"}, {file = "scikit_learn-1.5.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:394397841449853c2290a32050382edaec3da89e35b3e03d6cc966aebc6a8ae6"}, {file = "scikit_learn-1.5.2-cp312-cp312-win_amd64.whl", hash = "sha256:57cc1786cfd6bd118220a92ede80270132aa353647684efa385a74244a41e3b1"}, - {file = "scikit_learn-1.5.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9a702e2de732bbb20d3bad29ebd77fc05a6b427dc49964300340e4c9328b3f5"}, - {file = "scikit_learn-1.5.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:b0768ad641981f5d3a198430a1d31c3e044ed2e8a6f22166b4d546a5116d7908"}, - {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:178ddd0a5cb0044464fc1bfc4cca5b1833bfc7bb022d70b05db8530da4bb3dd3"}, - {file = "scikit_learn-1.5.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7284ade780084d94505632241bf78c44ab3b6f1e8ccab3d2af58e0e950f9c12"}, - {file = "scikit_learn-1.5.2-cp313-cp313-win_amd64.whl", hash = "sha256:b7b0f9a0b1040830d38c39b91b3a44e1b643f4b36e36567b80b7c6bd2202a27f"}, {file = "scikit_learn-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:757c7d514ddb00ae249832fe87100d9c73c6ea91423802872d9e74970a0e40b9"}, {file = "scikit_learn-1.5.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:52788f48b5d8bca5c0736c175fa6bdaab2ef00a8f536cda698db61bd89c551c1"}, {file = "scikit_learn-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:643964678f4b5fbdc95cbf8aec638acc7aa70f5f79ee2cdad1eec3df4ba6ead8"}, @@ -4764,13 +4788,13 @@ files = [ [[package]] name = "sympy" -version = "1.13.3" +version = "1.13.1" description = "Computer algebra system (CAS) in Python" optional = false python-versions = ">=3.8" files = [ - {file = "sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73"}, - {file = "sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9"}, + {file = "sympy-1.13.1-py3-none-any.whl", hash = "sha256:db36cdc64bf61b9b24578b6f7bab1ecdd2452cf008f34faa33776680c26d66f8"}, + {file = "sympy-1.13.1.tar.gz", hash = "sha256:9cebf7e04ff162015ce31c9c6c9144daa34a93bd082f54fd8f12deca4f47515f"}, ] [package.dependencies] @@ -4993,31 +5017,28 @@ files = [ [[package]] name = "torch" -version = "2.4.1" +version = "2.5.0" description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration" optional = false python-versions = ">=3.8.0" files = [ - {file = "torch-2.4.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:362f82e23a4cd46341daabb76fba08f04cd646df9bfaf5da50af97cb60ca4971"}, - {file = "torch-2.4.1-cp310-cp310-manylinux2014_aarch64.whl", hash = 
"sha256:e8ac1985c3ff0f60d85b991954cfc2cc25f79c84545aead422763148ed2759e3"}, - {file = "torch-2.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:91e326e2ccfb1496e3bee58f70ef605aeb27bd26be07ba64f37dcaac3d070ada"}, - {file = "torch-2.4.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:d36a8ef100f5bff3e9c3cea934b9e0d7ea277cb8210c7152d34a9a6c5830eadd"}, - {file = "torch-2.4.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:0b5f88afdfa05a335d80351e3cea57d38e578c8689f751d35e0ff36bce872113"}, - {file = "torch-2.4.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ef503165f2341942bfdf2bd520152f19540d0c0e34961232f134dc59ad435be8"}, - {file = "torch-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:092e7c2280c860eff762ac08c4bdcd53d701677851670695e0c22d6d345b269c"}, - {file = "torch-2.4.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:ddddbd8b066e743934a4200b3d54267a46db02106876d21cf31f7da7a96f98ea"}, - {file = "torch-2.4.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:fdc4fe11db3eb93c1115d3e973a27ac7c1a8318af8934ffa36b0370efe28e042"}, - {file = "torch-2.4.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:18835374f599207a9e82c262153c20ddf42ea49bc76b6eadad8e5f49729f6e4d"}, - {file = "torch-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:ebea70ff30544fc021d441ce6b219a88b67524f01170b1c538d7d3ebb5e7f56c"}, - {file = "torch-2.4.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:72b484d5b6cec1a735bf3fa5a1c4883d01748698c5e9cfdbeb4ffab7c7987e0d"}, - {file = "torch-2.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:c99e1db4bf0c5347107845d715b4aa1097e601bdc36343d758963055e9599d93"}, - {file = "torch-2.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b57f07e92858db78c5b72857b4f0b33a65b00dc5d68e7948a8494b0314efb880"}, - {file = "torch-2.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:f18197f3f7c15cde2115892b64f17c80dbf01ed72b008020e7da339902742cf6"}, - {file = "torch-2.4.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:5fc1d4d7ed265ef853579caf272686d1ed87cebdcd04f2a498f800ffc53dab71"}, - {file = "torch-2.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:40f6d3fe3bae74efcf08cb7f8295eaddd8a838ce89e9d26929d4edd6d5e4329d"}, - {file = "torch-2.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c9299c16c9743001ecef515536ac45900247f4338ecdf70746f2461f9e4831db"}, - {file = "torch-2.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:6bce130f2cd2d52ba4e2c6ada461808de7e5eccbac692525337cfb4c19421846"}, - {file = "torch-2.4.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:a38de2803ee6050309aac032676536c3d3b6a9804248537e38e098d0e14817ec"}, + {file = "torch-2.5.0-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:7f179373a047b947dec448243f4e6598a1c960fa3bb978a9a7eecd529fbc363f"}, + {file = "torch-2.5.0-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:15fbc95e38d330e5b0ef1593b7bc0a19f30e5bdad76895a5cffa1a6a044235e9"}, + {file = "torch-2.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:f499212f1cffea5d587e5f06144630ed9aa9c399bba12ec8905798d833bd1404"}, + {file = "torch-2.5.0-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:c54db1fade17287aabbeed685d8e8ab3a56fea9dd8d46e71ced2da367f09a49f"}, + {file = "torch-2.5.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:499a68a756d3b30d10f7e0f6214dc3767b130b797265db3b1c02e9094e2a07be"}, + {file = "torch-2.5.0-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9f3df8138a1126a851440b7d5a4869bfb7c9cc43563d64fd9d96d0465b581024"}, + {file = "torch-2.5.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:b81da3bdb58c9de29d0e1361e52f12fcf10a89673f17a11a5c6c7da1cb1a8376"}, + {file = "torch-2.5.0-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:ba135923295d564355326dc409b6b7f5bd6edc80f764cdaef1fb0a1b23ff2f9c"}, + {file = "torch-2.5.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:2dd40c885a05ef7fe29356cca81be1435a893096ceb984441d6e2c27aff8c6f4"}, + {file = "torch-2.5.0-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:bc52d603d87fe1da24439c0d5fdbbb14e0ae4874451d53f0120ffb1f6c192727"}, + {file = "torch-2.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea718746469246cc63b3353afd75698a288344adb55e29b7f814a5d3c0a7c78d"}, + {file = "torch-2.5.0-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:6de1fd253e27e7f01f05cd7c37929ae521ca23ca4620cfc7c485299941679112"}, + {file = "torch-2.5.0-cp313-cp313-manylinux1_x86_64.whl", hash = "sha256:83dcf518685db20912b71fc49cbddcc8849438cdb0e9dcc919b02a849e2cd9e8"}, + {file = "torch-2.5.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:65e0a60894435608334d68c8811e55fd8f73e5bf8ee6f9ccedb0064486a7b418"}, + {file = "torch-2.5.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:38c21ff1bd39f076d72ab06e3c88c2ea6874f2e6f235c9450816b6c8e7627094"}, + {file = "torch-2.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:ce4baeba9804da5a346e210b3b70826f5811330c343e4fe1582200359ee77fe5"}, + {file = "torch-2.5.0-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:03e53f577a96e4d41aca472da8faa40e55df89d2273664af390ce1f570e885bd"}, ] [package.dependencies] @@ -5025,35 +5046,36 @@ filelock = "*" fsspec = "*" jinja2 = "*" networkx = "*" -nvidia-cublas-cu12 = {version = "12.1.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-cupti-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-nvrtc-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cuda-runtime-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cublas-cu12 = {version = "12.4.5.8", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-cupti-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-nvrtc-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cuda-runtime-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} nvidia-cudnn-cu12 = {version = "9.1.0.70", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nccl-cu12 = {version = "2.20.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} -setuptools = "*" -sympy = "*" -triton = {version = "3.0.0", markers = "platform_system == \"Linux\" and 
platform_machine == \"x86_64\" and python_version < \"3.13\""} +nvidia-cufft-cu12 = {version = "11.2.1.3", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-curand-cu12 = {version = "10.3.5.147", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cusolver-cu12 = {version = "11.6.1.9", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-cusparse-cu12 = {version = "12.3.1.170", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nccl-cu12 = {version = "2.21.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nvjitlink-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +nvidia-nvtx-cu12 = {version = "12.4.127", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""} +setuptools = {version = "*", markers = "python_version >= \"3.12\""} +sympy = {version = "1.13.1", markers = "python_version >= \"3.9\""} +triton = {version = "3.1.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.13\""} typing-extensions = ">=4.8.0" [package.extras] opt-einsum = ["opt-einsum (>=3.3)"] -optree = ["optree (>=0.11.0)"] +optree = ["optree (>=0.12.0)"] [[package]] name = "tox" -version = "4.22.0" +version = "4.23.0" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.22.0-py3-none-any.whl", hash = "sha256:03734d9a9ac138cd1a898a372fb1b8079e2728618ae06dc37cbf3686cfb56eea"}, - {file = "tox-4.22.0.tar.gz", hash = "sha256:acc6c627cb3316585238d55d2b633e132fea1bdb01b9d93b56bce7caea6ae73d"}, + {file = "tox-4.23.0-py3-none-any.whl", hash = "sha256:46da40afb660e46238c251280eb910bdaf00b390c7557c8e4bb611f422e9db12"}, + {file = "tox-4.23.0.tar.gz", hash = "sha256:a6bd7d54231d755348d3c3a7b450b5bf6563833716d1299a1619587a1b77a3bf"}, ] [package.dependencies] @@ -5160,21 +5182,16 @@ vision = ["Pillow (>=10.0.1,<=15.0)"] [[package]] name = "triton" -version = "3.0.0" +version = "3.1.0" description = "A language and compiler for custom Deep Learning operations" optional = false python-versions = "*" files = [ - {file = "triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a"}, - {file = "triton-3.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ce8520437c602fb633f1324cc3871c47bee3b67acf9756c1a66309b60e3216c"}, - {file = "triton-3.0.0-1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:34e509deb77f1c067d8640725ef00c5cbfcb2052a1a3cb6a6d343841f92624eb"}, - {file = "triton-3.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bcbf3b1c48af6a28011a5c40a5b3b9b5330530c3827716b5fbf6d7adcc1e53e9"}, - {file = "triton-3.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6e5727202f7078c56f91ff13ad0c1abab14a0e7f2c87e91b12b6f64f3e8ae609"}, - {file = "triton-3.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b052da883351fdf6be3d93cedae6db3b8e3988d3b09ed221bccecfa9612230"}, - {file = "triton-3.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd34f19a8582af96e6291d4afce25dac08cb2a5d218c599163761e8e0827208e"}, - {file = 
"triton-3.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d5e10de8c011adeb7c878c6ce0dd6073b14367749e34467f1cff2bde1b78253"}, - {file = "triton-3.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8903767951bf86ec960b4fe4e21bc970055afc65e9d57e916d79ae3c93665e3"}, - {file = "triton-3.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41004fb1ae9a53fcb3e970745feb87f0e3c94c6ce1ba86e95fa3b8537894bef7"}, + {file = "triton-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b0dd10a925263abbe9fa37dcde67a5e9b2383fc269fdf59f5657cac38c5d1d8"}, + {file = "triton-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f34f6e7885d1bf0eaaf7ba875a5f0ce6f3c13ba98f9503651c1e6dc6757ed5c"}, + {file = "triton-3.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8182f42fd8080a7d39d666814fa36c5e30cc00ea7eeeb1a2983dbb4c99a0fdc"}, + {file = "triton-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dadaca7fc24de34e180271b5cf864c16755702e9f63a16f62df714a8099126a"}, + {file = "triton-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aafa9a20cd0d9fee523cd4504aa7131807a864cd77dcf6efe7e981f18b8c6c11"}, ] [package.dependencies] @@ -5290,13 +5307,13 @@ crypto-eth-addresses = ["eth-hash[pycryptodome] (>=0.7.0)"] [[package]] name = "virtualenv" -version = "20.26.6" +version = "20.27.0" description = "Virtual Python Environment builder" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, - {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, + {file = "virtualenv-20.27.0-py3-none-any.whl", hash = "sha256:44a72c29cceb0ee08f300b314848c86e57bf8d1f13107a5e671fb9274138d655"}, + {file = "virtualenv-20.27.0.tar.gz", hash = "sha256:2ca56a68ed615b8fe4326d11a0dca5dfbe8fd68510fb6c6349163bed3c15f2b2"}, ] [package.dependencies] @@ -5310,13 +5327,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "weaviate-client" -version = "4.8.1" +version = "4.9.0" description = "A python native Weaviate client" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "weaviate_client-4.8.1-py3-none-any.whl", hash = "sha256:c16453ebfd9bd4045675f8e50841d1af21aa9af1332f379d0418c4531c03bd44"}, - {file = "weaviate_client-4.8.1.tar.gz", hash = "sha256:2756996a2205bb991f258c064fc502011fc78a40e8786cb072208b1d3d7c9932"}, + {file = "weaviate_client-4.9.0-py3-none-any.whl", hash = "sha256:922a3a83c6946b6ea017d495af5980e90089f97004be4025a3d250a6c40ffaab"}, + {file = "weaviate_client-4.9.0.tar.gz", hash = "sha256:87b2995fd403f6106bd4cc8a9baa77280bdb95617ed6b9a60b0b34b5faeda999"}, ] [package.dependencies] @@ -5410,93 +5427,93 @@ files = [ [[package]] name = "yarl" -version = "1.15.3" +version = "1.15.4" description = "Yet another URL library" optional = false python-versions = ">=3.9" files = [ - {file = "yarl-1.15.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:14d6f07b7b4b3b8fba521904db58442281730b44318d6abb9908de79e2a4e4f4"}, - {file = "yarl-1.15.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eacd9de9b5b8262818a2e1f88efbd8d523abc8453de238c5d2f6a91fa85032dd"}, - {file = 
"yarl-1.15.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5a63ed17af784da3de39b82adfd4f8404ad5ee2ec8f616b063f37da3e64e0521"}, - {file = "yarl-1.15.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b55cc82ba92c07af6ba619dcf70cc89f7b9626adefb87d251f80f2e77419f1da"}, - {file = "yarl-1.15.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:63ba82841ce315e4b5dc8b9345062638c74b1864d38172d0a0403e5a083b0950"}, - {file = "yarl-1.15.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59dce412b2515de05ab2eb6aef19ad7f70857ad436cd65fc4276df007106fb42"}, - {file = "yarl-1.15.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e337737b8c9d837e5b4d9e906cc57ed7a639e16e515c8094509b17f556fdb642"}, - {file = "yarl-1.15.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2128315cdc517a45ceb72ec17b256a7940eeb4843c66834c203e7d6580c83405"}, - {file = "yarl-1.15.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69c2d111e67a818e702ba957da8c8e62de916f5c1b3da043f744084c63f12d46"}, - {file = "yarl-1.15.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d2a70e8bec768be7423d8d465858a3646b34257a20cc02fd92612f1b14931f50"}, - {file = "yarl-1.15.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:efe758958a7bffce68d91ade238df72667e1f18966ed7b1d3d390eead51a8903"}, - {file = "yarl-1.15.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:b765f19e23c29b68e4f8bbadd36f1da2333ba983d8da2d6518e5f0a7eb2579c2"}, - {file = "yarl-1.15.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:df494e5a79f2ef8f81f966f787e515760e639c6319a321c16198b379c256a157"}, - {file = "yarl-1.15.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:68b27a7d9fb0f145de608da2e45e37fd2397b00266f10487e557f769afa2842d"}, - {file = "yarl-1.15.3-cp310-cp310-win32.whl", hash = "sha256:6d1aba1f644d6e5e16edada31938c11b6c9c97e3bf065742a2c7740d38af0c19"}, - {file = "yarl-1.15.3-cp310-cp310-win_amd64.whl", hash = "sha256:925e72fc7a4222a5bf6d288876d5afacc8f833b49c4cca85f65089131ba25afa"}, - {file = "yarl-1.15.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dbd4808a209b175b5ebbac24c4798dd7511c5ee522a16f2f0eac78c717dfcdfc"}, - {file = "yarl-1.15.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:20f8bdaf667386cea1a8f49cb69a85f90346656d750d3c1278be1dbc76601065"}, - {file = "yarl-1.15.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:adeac55335669a189189373c93d131ebfc2de3ec04f0d3aa7dff6661f83b89b6"}, - {file = "yarl-1.15.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:690d8f702945506b58c9c5834d586e8fd819b845fe6239ab16ebc64a92a6fd3d"}, - {file = "yarl-1.15.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:df7784a29b9689341c17d06d826e3b52ee59d6b6916177e4db0477be7aad5f72"}, - {file = "yarl-1.15.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12c80ec2af97ff3e433699bcabc787ef34e7c08ec038a6e6a25fb81d7bb83607"}, - {file = "yarl-1.15.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39533b927c665bcff7da80bf299218e4af12f3e2be27e9c456e29547bcefd631"}, - {file = "yarl-1.15.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db32a5c2912db45e73f80107d178e30f5c48cf596762b3c60ddfebdd655385f0"}, - {file = "yarl-1.15.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:bde319602111e9acca3c4f87f4205b38ba6166004bf108de47553633f9a580fc"}, - {file = "yarl-1.15.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:493760c4ced954582db83c4760166992c016e1777ebc0f3ef1bb5eb60b2b5924"}, - {file = "yarl-1.15.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d9cd73f7bff5079d87c2622aa418a75d5d3cdc944d3edb905c5dfc3235466eb0"}, - {file = "yarl-1.15.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e924040582499f7514ec64691031504e6224b5ae7224216208fc2c94f8b13c89"}, - {file = "yarl-1.15.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1c3e9ae98719fe180751b093d02dbcc33b78a37e861d0f2c9571720bd31555db"}, - {file = "yarl-1.15.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6f2911cae6dd012adaaf51494dad4cafb4284ad1f3b588df6ea3e3017e053750"}, - {file = "yarl-1.15.3-cp311-cp311-win32.whl", hash = "sha256:acdfe626607a245aedca35b211f9305a9e7a33349da525bf4ef3caaec8ef51cd"}, - {file = "yarl-1.15.3-cp311-cp311-win_amd64.whl", hash = "sha256:0ace3927502a9f90a868d62c66623703cf5096dcb586187266e9b964d8dd6c81"}, - {file = "yarl-1.15.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:decf9d76191bfe34835f1abd3fa8ebe8a9cd7e16300a5c7e82b18c0812bb22a2"}, - {file = "yarl-1.15.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ce65ed7ad7b6cbca06b0c011b170bd2b0bc56b0a740540e2713e5ac12d7b9b2e"}, - {file = "yarl-1.15.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3cf2b50352df8775591869aaa22c52b64d60376ba99c0802b42778fedc90b775"}, - {file = "yarl-1.15.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32e8ebf0080ddd38ec05f8be940a3719e5fe1ab8bb6d2b3f6f8b89c9e34149aa"}, - {file = "yarl-1.15.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05183fd49244517cb11c208d0ae128f2e8a85ddb7caf22ad8b0ffcdf5481fcb6"}, - {file = "yarl-1.15.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:46653b5fd29e63ffe63335da343829a2b00bb43b0bd9bb21240d3b42629629e2"}, - {file = "yarl-1.15.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6316af233610b9868eda92cf68c016750cbf50085ac6c51faa17905ddd25605"}, - {file = "yarl-1.15.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5685ebc333c95b75be3a0a83a81b82b6411beee9585eaeb9e2e588ae8df23848"}, - {file = "yarl-1.15.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6da6f6c6ee5595658f21bb9d1ecd702f7a7f22f224ac063dfb595624aec4a2e0"}, - {file = "yarl-1.15.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:45c05b87a8494d9820ea1ac82118fd2f1d795d868e94766fe8ff670377bf6280"}, - {file = "yarl-1.15.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:04f930fcc940f96b8b29110c56882bcff8703f87a7b9354d3acf60ffded5a23d"}, - {file = "yarl-1.15.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8df77742b403e71c5d62d22d150e6e35efd6096a15f2c7419815911c62225100"}, - {file = "yarl-1.15.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f785d83ece0998e4ce4fadda22fa6c1ecc40e10f41617013a8726d2e9af0d98f"}, - {file = "yarl-1.15.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7794aade99be0d48b69bd5942acddfeff0de3d09c724d9abe4f19736708ef18f"}, - {file = "yarl-1.15.3-cp312-cp312-win32.whl", hash = "sha256:a3a98d70c667c957c7cd0b153d4cb5e45d43f5e2e23de73be6f7b5c883c01f72"}, - {file = "yarl-1.15.3-cp312-cp312-win_amd64.whl", hash = "sha256:90257bc627897a2c1d562efcd6a6b18887e9dacae795cad2367e8e16df47d966"}, - {file = 
"yarl-1.15.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f94d8adfdec402ff97cecc243b310c01d571362ca87bcf8def8e15cb3aaac3ee"}, - {file = "yarl-1.15.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d0328f798052a33803a77d0868c7f802e952127092c1738fc9e7bfcaac7207c5"}, - {file = "yarl-1.15.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f5f0a0691e39c2e7b5c0f23e6765fa6cb162dce99d9ab1897fdd0f7a4a38b6fb"}, - {file = "yarl-1.15.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:370f646d3654e196ddbf772a2d737fe4e1dd738267015b73ff6267ca592fd9d6"}, - {file = "yarl-1.15.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3487c57bc8f17f2586ae7fd0e77f65cd298d45b64d15f604bbb29f4cce0e7961"}, - {file = "yarl-1.15.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef67989d480358482830dc3bc232709804f46a61e7e9841d3f0b1c13a4735b3b"}, - {file = "yarl-1.15.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5ab6c64921802176f56c36aa67c5e6a8baf9557ec1662cb41ecdb5580b67eb9"}, - {file = "yarl-1.15.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb474a06023d01ead9c072b2580c22b2691aa1cabdcc19c3171ab1fa6d8496e3"}, - {file = "yarl-1.15.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92f9a45230d3aa8568c1d692ab27bf505a32dfe3b404721458fc374f411e8bd2"}, - {file = "yarl-1.15.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:24cad94cf2f46cc8e4b9cd44e4e8a84483536a6c54554960b02b10b5724ab122"}, - {file = "yarl-1.15.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:380f30073cbd9b740891bb56f44ee31f870e8721269b618ccc9913400936d9f6"}, - {file = "yarl-1.15.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:353306ba6f0218af1aefe4b9c8b3a0b81b209bc75d79357dac6aca70a7b09d6a"}, - {file = "yarl-1.15.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fe03cea925d884b8f1157a7037df2f5b6a6478a64b78ee600832d8a9f044c83e"}, - {file = "yarl-1.15.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5c4cc1a438ac52562427330e33891f50a78ffd38d335abc64f93f201c83bdc82"}, - {file = "yarl-1.15.3-cp313-cp313-win32.whl", hash = "sha256:956975a3a1ce1f4537be22278d6a283b8bc74d77671f7f6469ab1e800f4e9b02"}, - {file = "yarl-1.15.3-cp313-cp313-win_amd64.whl", hash = "sha256:2e61b72cf15922a7a665299a6b6825bd9901d67ec3b9d3cf9b256dc1667c9bb1"}, - {file = "yarl-1.15.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:270fef2b335e60c91ee835c524445e2248af841c8b72f48769ed6c02fbff5873"}, - {file = "yarl-1.15.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:59b77f0682e1917be197fc8229530f0c6fb3ef8e242d8256ba091a3a1c0ef7e6"}, - {file = "yarl-1.15.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cc4b999718287073dccd3acb0ef1593961bd7923af08991cb3c94080db503935"}, - {file = "yarl-1.15.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9b251d3f90e125ff0d1f76257329a9190fa1bfd2157344c875580bff6dedc62"}, - {file = "yarl-1.15.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ccb4667e0c0a25815efbfe251d24b56624449a319d4bb497074dd49444fb306"}, - {file = "yarl-1.15.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ac26e43b56dbafb30256906bc763cc1f22e05825ae1ced4c6afbd0e6584f18de"}, - {file = "yarl-1.15.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2207491555af5dbbee4c3179a76766f7bc1ecff858f420ea96f2e105ca42c4dd"}, - {file = 
"yarl-1.15.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:14effa29db6113be065a594e13a0f45afb9c1e374fd22b4bc3a4eff0725184b2"}, - {file = "yarl-1.15.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:19077525cd36c797cae19262e15f2881da33c602fb35d075ff0e4263b51b8b88"}, - {file = "yarl-1.15.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:d80c019083506886df098b7bb0d844e19db7e226736829ef49f892ed0a070fa5"}, - {file = "yarl-1.15.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c24debeec87908a864a2b4cb700f863db9441cabacdb22dc448c5d38b55c6f62"}, - {file = "yarl-1.15.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:1c49fe426c45520b4b8a48544d3a9a58194f39c1b57d92451883f847c299a137"}, - {file = "yarl-1.15.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:66ddcd7ee3264bc937860f4780290d60f6472ca0484c214fe805116a831121e8"}, - {file = "yarl-1.15.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a5cbbb06559757f091f9e71d3f76c27d4dfe0652cc3f17ccce398b8377bfda4"}, - {file = "yarl-1.15.3-cp39-cp39-win32.whl", hash = "sha256:d798de0b50efb66583fc096bcdaa852ed6ea3485a4eb610d6a634f8010d932f4"}, - {file = "yarl-1.15.3-cp39-cp39-win_amd64.whl", hash = "sha256:8f0b33fd088e93ba5f7f6dd55226630e7b78212752479c8fcc6abbd143b9c1ce"}, - {file = "yarl-1.15.3-py3-none-any.whl", hash = "sha256:a1d49ed6f4b812dde88e937d4c2bd3f13d72c23ef7de1e17a63b7cacef4b5691"}, - {file = "yarl-1.15.3.tar.gz", hash = "sha256:fbcff47f8ba82467f203037f7a30decf5c724211b224682f7236edb0dcbb5b95"}, + {file = "yarl-1.15.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:551205388d1da18a9975302c9a274ba24788f53bb9bb86187496ebf9e938916e"}, + {file = "yarl-1.15.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eee724176b5bc50ee64905f559345448119b860a30b9489bd7a073f61baf925f"}, + {file = "yarl-1.15.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db818e33599f7b2e4c6507f2b2c24f45ff539a1b6e4e09163bb6f3cfb4616ca7"}, + {file = "yarl-1.15.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07019a9de859c5a29916defd1e8c7557de6491a10bf50c49ff5284e6aedf5313"}, + {file = "yarl-1.15.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db64a20e78969fc66665d2e5fc96cb4f4dc80f2137d8fed4b5a650ad569bb60f"}, + {file = "yarl-1.15.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4076bfd8f1621449b19b9826848ed51bf0f2d1d38e82647c312c0730d8778903"}, + {file = "yarl-1.15.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c23a442973dba3646811c284fce3dddd7fe5c2bd674ac73a122198e8218d6115"}, + {file = "yarl-1.15.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2bdb038b3f5c284e3919218c580dedc95f592c417a358361450b9519b22f7a8"}, + {file = "yarl-1.15.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:59db8e6888d5302b8dbca0c1026ddabe99d81d67cdc101941519e13ffc9050fe"}, + {file = "yarl-1.15.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:f3294ce265011547630a59c20085fcb6af8cc5fa1fa44a203251f7d86cd5d913"}, + {file = "yarl-1.15.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4851618679ca70b863ba2e7109be5f09f8fd7715ec505bd42e5a947dcfde3a45"}, + {file = "yarl-1.15.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:dce1c56beef74d9c799a6ed94001693232a1402138292353a8ce302b64f457d9"}, + {file = "yarl-1.15.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:1e7468f31de61a82817f918743e5229fce774f73fad58487cdf88eef4f06d864"}, + {file = "yarl-1.15.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:527c68f48a91d953691291d3bce0209293aa5ad13ff05286ddb506791c331818"}, + {file = "yarl-1.15.4-cp310-cp310-win32.whl", hash = "sha256:c30115cecaf25fdcb67cc71c669d08425207f62d7a2f6d5416057c1460529216"}, + {file = "yarl-1.15.4-cp310-cp310-win_amd64.whl", hash = "sha256:df09c80f4bc2bc2efde309af383c3fe8fd8c51fe0519edb350b9c9e0af43ffa4"}, + {file = "yarl-1.15.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:76259901cf1ac3db65e7e6dff04775b626d0715f9b51d92b447351144c756a82"}, + {file = "yarl-1.15.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98d8dc1e8133f86d916125deca9780d791b22645f0d62bafe1452d1cd5eac631"}, + {file = "yarl-1.15.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d0f16c87c62b7a94b389ddf6a8c9d081265d788875c39f3a80108c4856eea7b"}, + {file = "yarl-1.15.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8de5328d91859b461899497980d4cc8269e84e2d18640f6ac643886fda9000bf"}, + {file = "yarl-1.15.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84937d00e2ea03616c40977de20189fa13a9213e5744a3c6afa0e7dd9141d69c"}, + {file = "yarl-1.15.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:691a3b498fdebef63308e8967bb598cfd326c56d628da82b799dd181bace4503"}, + {file = "yarl-1.15.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a706db0c3b7e4578ff34ed2b1d2507b08fd491346ffc64468786fdf1151d938"}, + {file = "yarl-1.15.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:adb6b5d07d17c32f9d34c9dd4a693637a72323cfcb1f8a52d57033ab2dd21e99"}, + {file = "yarl-1.15.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6e100c6c7d9e9d469009fd55cc4d7ad168d67d40758865c50da713f7ada491e5"}, + {file = "yarl-1.15.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:df6b254e55c8ac2362afaa651e3e53453aa19a095570792346245773b434176e"}, + {file = "yarl-1.15.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8721f8bedaa722c3c483cc06a1399cbfdb280eadf443aa5d324b0203cef2a75f"}, + {file = "yarl-1.15.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1005921b30f4f39bf893946df6173567ff650307babb5ec04bbf64342a1f62c1"}, + {file = "yarl-1.15.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ab79cc13307065a0b3ef087f09f0509996fc605d35d6642bb28e5d85b2648e1e"}, + {file = "yarl-1.15.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f337486742c700b102d640830aab3faf2848bed966b479a39e6783edd4ab1c6c"}, + {file = "yarl-1.15.4-cp311-cp311-win32.whl", hash = "sha256:20acf84bd1ce530065f8e957e4a5878fda4bc5f18cb02659828210e1519de54e"}, + {file = "yarl-1.15.4-cp311-cp311-win_amd64.whl", hash = "sha256:ab9ccf26cb3fa32747ba2a637a189d2d42386a2fc4afc10dbc7f85922dd23b0f"}, + {file = "yarl-1.15.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f923e94e93a37fd990e8336e0b9bedea533e7cbed14e0c572bf9357ef2a70681"}, + {file = "yarl-1.15.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3198da7d7c34e29fc8c823e0c3ce6c7274aac35760de557c2017489c7d98fc5a"}, + {file = "yarl-1.15.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d886de2ea81f513ba2d6820451d33b767a97c37867ba688d42e164b2dbca1362"}, + {file = "yarl-1.15.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ac85e760543129a1912a82438fc8075223e35eaa2d457d61cd83c27d00d17be"}, + {file = 
"yarl-1.15.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e58c5d07b1f78dd4cb180c5b3b82465cd281aaeee8aafea0e5d72a4b97922cb1"}, + {file = "yarl-1.15.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9060589d0acad1fca048861fa9ee3e8ed060f67894fa885969648ab6e9e99a54"}, + {file = "yarl-1.15.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccd6774aa7bebdf9ca608bb0839318757a71b8e0d2cf7b10c002bc8790bd343e"}, + {file = "yarl-1.15.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7694f109867ee428c21b85ae19fd31d164c691eb45cc95c561cfdeba237a12e3"}, + {file = "yarl-1.15.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:83e7154aa0d17f5c93d27ac01088fd9ab6673e7bab1acbd07cd7a865b980c045"}, + {file = "yarl-1.15.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:f16d1940c0cbc342f1d29d6212a006d172be616d2942c5c41966e8a3ce4c3be1"}, + {file = "yarl-1.15.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7d5226c70af3ad9569ccc4ccc04ab65be79eeb22c87d7ae789c89e62ef76bbd6"}, + {file = "yarl-1.15.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f25906e4a72d9833e81717c39a39dee7297ff5cb44957d06d177a2ab8ef2ef7f"}, + {file = "yarl-1.15.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e07e4b17b648c880e8e42bf1ac0a730bde114961646ae1c2ec4433f0c11ca94"}, + {file = "yarl-1.15.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f8136bde8dfa4477c6a85c79a366581b4a505b51a52b669318fb631d3f4f638"}, + {file = "yarl-1.15.4-cp312-cp312-win32.whl", hash = "sha256:ccbeaf5b18b173b9d78e332e017b30ba8bedcf03cdce1d13490b82a3f421bc98"}, + {file = "yarl-1.15.4-cp312-cp312-win_amd64.whl", hash = "sha256:f74f6ffdc633aefecbc80282242a5395058db9d1247fa7dd2f070ef84dc82583"}, + {file = "yarl-1.15.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:4f66a0eda48844508736e47ed476d8fdd7cdbf16a4053b5d439509a25f708504"}, + {file = "yarl-1.15.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:fd2bb86f40962d53a91def15a2f7684c62e081a7b96ec74ed0259c34b15973b9"}, + {file = "yarl-1.15.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f864b412557e69a6b953d62c01a0ed0ee342666298aa7f2a29af526bfa80f6e9"}, + {file = "yarl-1.15.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a79c0a8bbb046add85663af85e9993b691bf20c2a109518bd35e0ce77edfe42"}, + {file = "yarl-1.15.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de479e30abd2dfd49fdad3bd6953f2d930a45380be5143c0c9f7a1215cffc8cc"}, + {file = "yarl-1.15.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21fabe58042f3e567b4edc75b2cf44cea02f228e41ac09d73de126bf685fe883"}, + {file = "yarl-1.15.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77390496f2f32437a721c854897f889abefae0f3009daf90a2f703508d96c920"}, + {file = "yarl-1.15.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3896bf15284dd23acab1f2e7fceb350d8da6f6f2436b922f7ec6b3de685d34ca"}, + {file = "yarl-1.15.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:590e2d733a82ecf004c5c531cbef0d6be328e93adec960024eb213f10cb9503e"}, + {file = "yarl-1.15.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:1ceb677fb583971351627eac70eec6763fbc889761828da7a276681b5e39742d"}, + {file = "yarl-1.15.4-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:69f628d2da1489b27959f4d63fdb326781fe484944dce94abbf919e416c54abe"}, + {file = "yarl-1.15.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:35a6b69cc44bda002705d6138346bf0a0234cbb7c26c3bf192513eb946aee6f9"}, + {file = "yarl-1.15.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:49f886e8dcf591275c6e20915b516fd81647857566b0c0158c52df1e468849c9"}, + {file = "yarl-1.15.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:49190eb2ece70313742b0ea51520340288a059674da1f39eefb589d598d9453e"}, + {file = "yarl-1.15.4-cp313-cp313-win32.whl", hash = "sha256:48334a6c8afee93097eb17c0a094234dac2d88da076c8cf372e09e2a5dcc4b66"}, + {file = "yarl-1.15.4-cp313-cp313-win_amd64.whl", hash = "sha256:f68025d6ba1816428b7de615c80f61cb03d5b7061158d4ced7696657a64aa59c"}, + {file = "yarl-1.15.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8b569f4f511b59518ba6719feb5b8bf0a5d4115e6ac903c89e10a8a9ac656017"}, + {file = "yarl-1.15.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9fe17744d60fc404ac61f824118e1e15ce3c2e92eced9b8e22f3c7847acafbf2"}, + {file = "yarl-1.15.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:115346433fad2084ee3a1a925ccc0659990aa42e208ca54c278830a150a3caf3"}, + {file = "yarl-1.15.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60165b8bc260f453321004b193770a66cc1b1a5c57c07d4b8dcc96839e7ad578"}, + {file = "yarl-1.15.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65a0168691373e08d869d48b62c8bed0af0cdaef19c76e11ad73b43901bbdb5a"}, + {file = "yarl-1.15.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:787532f00543a21b8f4ec3050b4e01b8fe437797903c0156a0b03dfca5e1ba6c"}, + {file = "yarl-1.15.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51c9d173e5fa4b12d06ddca09a41cabbdeb660471dbe55432423eec095709ab"}, + {file = "yarl-1.15.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c96eaa30030e1cfafe533f3da8983812281235b7c50ef2a6c78ceca7aea1a0b"}, + {file = "yarl-1.15.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4feab2dcb725eb5b4835207ecf3d370ff7ce930b253cba5e681646cb80d64c2c"}, + {file = "yarl-1.15.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:de38b0b5b86e57efb129d179854e78b65cb8e294a8c75560877869c43aa2415a"}, + {file = "yarl-1.15.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:65e0467f90f2acf3bc83bbfeedece8f1fd84df8add1a54e9600ed7b7b5debdb0"}, + {file = "yarl-1.15.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:40c18f96696549e73b92dc12619f07019cbf5faefc1612608f967c144816e493"}, + {file = "yarl-1.15.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:46491b3e058de7b484e1c9fb20aa8441f06d6c9a18395d711c1c2a9ad6707d6a"}, + {file = "yarl-1.15.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:faa3dd7f4620ab5e5da7a0789d0aac78a9ad0376f102409d442ec5a4179e200a"}, + {file = "yarl-1.15.4-cp39-cp39-win32.whl", hash = "sha256:c33ea7c55a73be343f02361795caf52a187357ea07708fb1cae6661ee1d689c8"}, + {file = "yarl-1.15.4-cp39-cp39-win_amd64.whl", hash = "sha256:11b207061f28b4b6d980239b22ab0ecfadc47846b5a3b8e79f27fcc019d02cf9"}, + {file = "yarl-1.15.4-py3-none-any.whl", hash = "sha256:e5cc288111c450c0a54a74475591b206d3b1cb47dc71bb6200f6be8b1337184c"}, + {file = "yarl-1.15.4.tar.gz", hash = "sha256:a0c5e271058d148d730219ca4f33c5d841c6bd46e05b0da60fea7b516906ccd3"}, ] [package.dependencies] @@ -5530,4 +5547,4 @@ kg-creation-tools = ["pygraphviz", "pygraphviz"] [metadata] lock-version = "2.0" 
python-versions = "^3.9.0" -content-hash = "03aa15f97dec08ce06ab69178dcf505276f25c507601084ab976ad7316d756de" +content-hash = "e9df336def667c8807af829545f1fde338cbea9fc06b6949998726d585d5a29d" diff --git a/pyproject.toml b/pyproject.toml index bdbbb88f..61fd756e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,6 +82,7 @@ cohere = {version = "^5.9.0"} anthropic = { version = "^0.34.2"} mistralai = {version = "^1.0.3"} qdrant-client = {version = "^1.11.3"} +langchain-openai = "^0.2.2" # needed in the examples [tool.poetry.extras] external_clients = ["weaviate-client", "pinecone-client", "google-cloud-aiplatform", "cohere", "anthropic", "mistralai", "qdrant-client"] diff --git a/src/neo4j_graphrag/embeddings/__init__.py b/src/neo4j_graphrag/embeddings/__init__.py index 635b581f..6c30951d 100644 --- a/src/neo4j_graphrag/embeddings/__init__.py +++ b/src/neo4j_graphrag/embeddings/__init__.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. from .base import Embedder +from .cohere import CohereEmbeddings from .mistral import MistralAIEmbeddings from .openai import AzureOpenAIEmbeddings, OpenAIEmbeddings from .sentence_transformers import SentenceTransformerEmbeddings @@ -25,4 +26,5 @@ "AzureOpenAIEmbeddings", "VertexAIEmbeddings", "MistralAIEmbeddings", + "CohereEmbeddings", ]
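The final hunk above re-exports CohereEmbeddings from the package's top-level embeddings module. A minimal usage sketch, assuming the optional cohere client from the external_clients extra is installed, a Cohere API key is available in the environment, and that "embed-english-v3.0" is a model the account can access (both the model name and the key handling are assumptions, not part of this patch):

    from neo4j_graphrag.embeddings import CohereEmbeddings

    # Assumed model name; the API key is expected to be picked up from the
    # environment by the underlying cohere client.
    embedder = CohereEmbeddings(model="embed-english-v3.0")
    vector = embedder.embed_query("What is GraphRAG?")
    print(len(vector))  # dimensionality of the returned embedding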