diff --git a/libs/community/langchain_community/vectorstores/pinecone.py b/libs/community/langchain_community/vectorstores/pinecone.py
index 7e2e9bbb0d75b..a2a92d68f32ab 100644
--- a/libs/community/langchain_community/vectorstores/pinecone.py
+++ b/libs/community/langchain_community/vectorstores/pinecone.py
@@ -401,7 +401,7 @@ def from_texts(
         pool_threads: int = 4,
         embeddings_chunk_size: int = 1000,
         **kwargs: Any,
-    ) -> PineconeVectorStore:
+    ) -> Pinecone:
         """
         DEPRECATED: use langchain_pinecone.PineconeVectorStore.from_texts instead:
         Construct Pinecone wrapper from raw documents.
@@ -451,7 +451,7 @@ def from_existing_index(
         text_key: str = "text",
         namespace: Optional[str] = None,
         pool_threads: int = 4,
-    ) -> PineconeVectorStore:
+    ) -> Pinecone:
         """Load pinecone vectorstore from index name."""
         pinecone_index = cls.get_pinecone_index(index_name, pool_threads)
         return cls(pinecone_index, embedding, text_key, namespace)
diff --git a/libs/community/tests/integration_tests/vectorstores/test_pinecone.py b/libs/community/tests/integration_tests/vectorstores/test_pinecone.py
index 37820c0373da7..99a42dc516216 100644
--- a/libs/community/tests/integration_tests/vectorstores/test_pinecone.py
+++ b/libs/community/tests/integration_tests/vectorstores/test_pinecone.py
@@ -91,7 +91,7 @@ def test_from_texts(
         needs = f"foobuu {unique_id} booo"
         texts.insert(0, needs)

-        docsearch = PineconeVectorStore.from_texts(
+        docsearch = Pinecone.from_texts(
             texts=texts,
             embedding=embedding_openai,
             index_name=index_name,
@@ -111,7 +111,7 @@ def test_from_texts_with_metadatas(
         texts.insert(0, needs)

         metadatas = [{"page": i} for i in range(len(texts))]
-        docsearch = PineconeVectorStore.from_texts(
+        docsearch = Pinecone.from_texts(
             texts,
             embedding_openai,
             index_name=index_name,
@@ -128,7 +128,7 @@ def test_from_texts_with_scores(self, embedding_openai: OpenAIEmbeddings) -> Non
         """Test end to end construction and search with scores and IDs."""
         texts = ["foo", "bar", "baz"]
         metadatas = [{"page": i} for i in range(len(texts))]
-        docsearch = PineconeVectorStore.from_texts(
+        docsearch = Pinecone.from_texts(
             texts,
             embedding_openai,
             index_name=index_name,
@@ -157,7 +157,7 @@ def test_from_existing_index_with_namespaces(
         # Create two indexes with the same name but different namespaces
         texts_1 = ["foo", "bar", "baz"]
         metadatas = [{"page": i} for i in range(len(texts_1))]
-        PineconeVectorStore.from_texts(
+        Pinecone.from_texts(
             texts_1,
             embedding_openai,
             index_name=index_name,
@@ -168,7 +168,7 @@ def test_from_existing_index_with_namespaces(

         texts_2 = ["foo2", "bar2", "baz2"]
         metadatas = [{"page": i} for i in range(len(texts_2))]
-        PineconeVectorStore.from_texts(
+        Pinecone.from_texts(
             texts_2,
             embedding_openai,
             index_name=index_name,
@@ -177,7 +177,7 @@ def test_from_existing_index_with_namespaces(
         )

         # Search with namespace
-        docsearch = PineconeVectorStore.from_existing_index(
+        docsearch = Pinecone.from_existing_index(
             index_name=index_name,
             embedding=embedding_openai,
             namespace=f"{index_name}-1",
@@ -192,7 +192,7 @@ def test_add_documents_with_ids(
         self, texts: List[str], embedding_openai: OpenAIEmbeddings
     ) -> None:
         ids = [uuid.uuid4().hex for _ in range(len(texts))]
-        PineconeVectorStore.from_texts(
+        Pinecone.from_texts(
             texts=texts,
             ids=ids,
             embedding=embedding_openai,
@@ -203,7 +203,7 @@ def test_add_documents_with_ids(
         assert index_stats["namespaces"][index_name]["vector_count"] == len(texts)

         ids_1 = [uuid.uuid4().hex for _ in range(len(texts))]
-        PineconeVectorStore.from_texts(
+        Pinecone.from_texts(
             texts=texts,
             ids=ids_1,
             embedding=embedding_openai,
@@ -219,7 +219,7 @@ def test_relevance_score_bound(self, embedding_openai: OpenAIEmbeddings) -> None
         """Ensures all relevance scores are between 0 and 1."""
         texts = ["foo", "bar", "baz"]
         metadatas = [{"page": i} for i in range(len(texts))]
-        docsearch = PineconeVectorStore.from_texts(
+        docsearch = Pinecone.from_texts(
             texts,
             embedding_openai,
             index_name=index_name,
@@ -271,7 +271,7 @@ def test_from_texts_with_metadatas_benchmark(
         texts = [document.page_content for document in documents] * data_multiplier
         uuids = [uuid.uuid4().hex for _ in range(len(texts))]
         metadatas = [{"page": i} for i in range(len(texts))]
-        docsearch = PineconeVectorStore.from_texts(
+        docsearch = Pinecone.from_texts(
             texts,
             embedding_openai,
             ids=uuids,
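
For context, a minimal usage sketch of the two construction paths this patch touches: the deprecated `Pinecone` class in `langchain_community`, whose return annotations are restored here, and the `langchain_pinecone.PineconeVectorStore` replacement that its DEPRECATED docstring points to. The index name, embedding class, and query below are illustrative assumptions, not values taken from the patch; the Pinecone index is assumed to already exist and the relevant API keys to be set in the environment.

from langchain_openai import OpenAIEmbeddings

from langchain_community.vectorstores import Pinecone  # deprecated community wrapper
from langchain_pinecone import PineconeVectorStore  # recommended replacement

embedding = OpenAIEmbeddings()

# Deprecated path: the community classmethods whose return annotations this
# patch changes back from PineconeVectorStore to Pinecone.
legacy_store = Pinecone.from_texts(
    texts=["foo", "bar", "baz"],
    embedding=embedding,
    index_name="langchain-test-index",  # assumed, pre-created index
)

# Recommended path per the DEPRECATED notice in the docstring.
store = PineconeVectorStore.from_existing_index(
    index_name="langchain-test-index",
    embedding=embedding,
)
docs = store.similarity_search("foo", k=1)

Both classes expose the same `from_texts` / `from_existing_index` constructors, which is why the patch only needs to adjust annotations and the test call sites rather than any call signatures.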