diff --git a/README.md b/README.md
index e72686a24..a09e30636 100644
--- a/README.md
+++ b/README.md
@@ -98,7 +98,7 @@ import asyncio
 from neo4j import GraphDatabase
 from neo4j_graphrag.embeddings import OpenAIEmbeddings
 from neo4j_graphrag.experimental.pipeline.kg_builder import SimpleKGPipeline
-from neo4j_graphrag.llm.openai_llm import OpenAILLM
+from neo4j_graphrag.llm import OpenAILLM

 NEO4J_URI = "neo4j://localhost:7687"
 NEO4J_USERNAME = "neo4j"
@@ -149,6 +149,9 @@ asyncio.run(kg_builder.run_async(text=text))
 driver.close()
 ```

+> Warning: In order to run this code, the `openai` Python package needs to be installed: `pip install "neo4j_graphrag[openai]"`
+
+
 Example knowledge graph created using the above script:

 ![Example knowledge graph](https://raw.githubusercontent.com/neo4j/neo4j-graphrag-python/fd276af0069e4dd1769255d358793cc96e299bf3/images/kg_construction.svg)
diff --git a/docs/source/user_guide_kg_builder.rst b/docs/source/user_guide_kg_builder.rst
index ea91ec2cc..edc89f609 100644
--- a/docs/source/user_guide_kg_builder.rst
+++ b/docs/source/user_guide_kg_builder.rst
@@ -357,6 +357,11 @@ can be omitted (the parser will automatically import from
 `neo4j_graphrag.llm`). For custom implementations, the full path must be
 explicitly specified, for example: `my_package.my_llm.MyLLM`.

+.. warning::
+
+    Check the :ref:`installation` section to make sure you have the required dependencies installed when using an LLM.
+
+
 Defining an Embedder
 --------------------
@@ -800,7 +805,7 @@ It can be used in this way:

     from neo4j_graphrag.experimental.components.entity_relation_extractor import (
         LLMEntityRelationExtractor,
     )
-    from neo4j_graphrag.llm.openai import OpenAILLM
+    from neo4j_graphrag.llm import OpenAILLM

     extractor = LLMEntityRelationExtractor(
         llm=OpenAILLM(
@@ -814,6 +819,10 @@

     await extractor.run(chunks=TextChunks(chunks=[TextChunk(text="some text")]))

+.. warning::
+
+    Using `OpenAILLM` requires the `openai` Python client. You can install it with `pip install "neo4j_graphrag[openai]"`.
+
 .. warning::

     The `LLMEntityRelationExtractor` works better if `"response_format": {"type": "json_object"}` is in the model parameters.
@@ -834,13 +843,7 @@ This behaviour can be changed by using the `on_error` flag in the `LLMEntityRela
     )

     extractor = LLMEntityRelationExtractor(
-        llm=OpenAILLM(
-            model_name="gpt-4o",
-            model_params={
-                "max_tokens": 1000,
-                "response_format": {"type": "json_object"},
-            },
-        ),
+        # ...
         on_error=OnError.RAISE,
     )
diff --git a/docs/source/user_guide_rag.rst b/docs/source/user_guide_rag.rst
index 8074eef77..f569505ec 100644
--- a/docs/source/user_guide_rag.rst
+++ b/docs/source/user_guide_rag.rst
@@ -56,10 +56,9 @@ In practice, it's done with only a few lines of code:

     print(response.answer)

-.. note::
+.. warning::

-    In order to run this code, the `openai` Python package needs to be installed:
-    `pip install openai`
+    Using `OpenAILLM` requires the `openai` Python client. You can install it with `pip install "neo4j_graphrag[openai]"`.

 The following sections provide more details about how to customize this code.
@@ -108,7 +107,7 @@ to learn more about the configuration.
 .. note::

     In order to run this code, the `openai` Python package needs to be installed:
-    `pip install openai`
+    `pip install "neo4j_graphrag[openai]"`

 See :ref:`azureopenaillm`.
@@ -134,7 +133,7 @@ To use VertexAI, instantiate the `VertexAILLM` class:
 .. note::

     In order to run this code, the `google-cloud-aiplatform` Python package needs to be installed:
-    `pip install google-cloud-aiplatform`
+    `pip install "neo4j_graphrag[vertexai]"`

 See :ref:`vertexaillm`.
@@ -160,7 +159,7 @@ To use Anthropic, instantiate the `AnthropicLLM` class:
 .. note::

     In order to run this code, the `anthropic` Python package needs to be installed:
-    `pip install anthropic`
+    `pip install "neo4j_graphrag[anthropic]"`

 See :ref:`anthropicllm`.
@@ -184,7 +183,7 @@ To use MistralAI, instantiate the `MistralAILLM` class:
 .. note::

     In order to run this code, the `mistralai` Python package needs to be installed:
-    `pip install mistralai`
+    `pip install "neo4j_graphrag[mistralai]"`

 See :ref:`mistralaillm`.
@@ -209,7 +208,7 @@ To use Cohere, instantiate the `CohereLLM` class:
 .. note::

     In order to run this code, the `cohere` Python package needs to be installed:
-    `pip install cohere`
+    `pip install "neo4j_graphrag[cohere]"`

 See :ref:`coherellm`.
@@ -630,7 +629,7 @@ Weaviate Retrievers
 .. note::

     In order to import this retriever, the Weaviate Python client must be installed:
-    `pip install weaviate-client`
+    `pip install "neo4j_graphrag[weaviate]"`

 .. code:: python
@@ -663,7 +662,7 @@ Pinecone Retrievers
 .. note::

     In order to import this retriever, the Pinecone Python client must be installed:
-    `pip install pinecone-client`
+    `pip install "neo4j_graphrag[pinecone]"`

 .. code:: python
@@ -691,7 +690,7 @@ Qdrant Retrievers
 .. note::

     In order to import this retriever, the Qdrant Python client must be installed:
-    `pip install qdrant-client`
+    `pip install "neo4j_graphrag[qdrant]"`

 .. code:: python
@@ -787,7 +786,7 @@ LLMs can be different.

     from neo4j import GraphDatabase
     from neo4j_graphrag.retrievers import Text2CypherRetriever
-    from neo4j_graphrag.llm.openai import OpenAILLM
+    from neo4j_graphrag.llm import OpenAILLM

     URI = "neo4j://localhost:7687"
     AUTH = ("neo4j", "password")
@@ -796,7 +795,7 @@ LLMs can be different.
     driver = GraphDatabase.driver(URI, auth=AUTH)

     # Create LLM object
-    llm = OpenAILLM(model_name="gpt-3.5-turbo")
+    llm = OpenAILLM(model_name="gpt-4o")

     # (Optional) Specify your own Neo4j schema
     neo4j_schema = """
@@ -833,6 +832,10 @@ LLMs can be different.

     print(retriever.search(query_text=query_text))

+.. warning::
+
+    Using `OpenAILLM` requires the `openai` Python client. You can install it with `pip install "neo4j_graphrag[openai]"`.
+
 .. note::

     Since we are not performing any similarity search (vector index), the Text2Cypher
diff --git a/examples/build_graph/simple_kg_builder_from_pdf.py b/examples/build_graph/simple_kg_builder_from_pdf.py
index 05ac69892..f7ad683da 100644
--- a/examples/build_graph/simple_kg_builder_from_pdf.py
+++ b/examples/build_graph/simple_kg_builder_from_pdf.py
@@ -15,7 +15,7 @@
 from neo4j_graphrag.experimental.pipeline.kg_builder import SimpleKGPipeline
 from neo4j_graphrag.experimental.pipeline.pipeline import PipelineResult
 from neo4j_graphrag.llm import LLMInterface
-from neo4j_graphrag.llm.openai_llm import OpenAILLM
+from neo4j_graphrag.llm import OpenAILLM

 # Neo4j db infos
 URI = "neo4j://localhost:7687"
diff --git a/examples/build_graph/simple_kg_builder_from_text.py b/examples/build_graph/simple_kg_builder_from_text.py
index 288a21ab5..29a5cfc5b 100644
--- a/examples/build_graph/simple_kg_builder_from_text.py
+++ b/examples/build_graph/simple_kg_builder_from_text.py
@@ -19,7 +19,7 @@
     RelationInputType,
 )
 from neo4j_graphrag.llm import LLMInterface
-from neo4j_graphrag.llm.openai_llm import OpenAILLM
+from neo4j_graphrag.llm import OpenAILLM

 logging.basicConfig()
 logging.getLogger("neo4j_graphrag").setLevel(logging.DEBUG)
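
As a quick reference, here is a minimal sketch of the import pattern these changes converge on. It assumes the `openai` extra is installed; the model name and parameters simply mirror the examples above and are not prescribed by this diff:

```python
# Minimal sketch: OpenAILLM is now imported from the package root `neo4j_graphrag.llm`
# instead of the private `openai_llm` / `openai` submodules.
# Assumes the optional extra is installed: pip install "neo4j_graphrag[openai]"
from neo4j_graphrag.llm import OpenAILLM

llm = OpenAILLM(
    model_name="gpt-4o",
    # Recommended when the LLM is used for entity/relation extraction (see the warning above).
    model_params={"response_format": {"type": "json_object"}},
)
```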