Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

sanity_test.py gets settings from settings.py #20

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
25 changes: 13 additions & 12 deletions test/sanity_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from docling.document_converter import DocumentConverter

from paragon import index_path_for_rag, execute_rag_query
from paragon.settings import DEFAULT_SETTINGS

SOURCE_PDF_URLS = [
"https://docs.redhat.com/en/documentation/red_hat_build_of_microshift/4.12/pdf/cli_tools/Red_Hat_build_of_MicroShift-4.12-CLI_tools-en-US.pdf",
Expand Down Expand Up @@ -45,36 +46,36 @@ def main():
# index the JSONs under a Milvus Lite instance
print("Step 2: Embedding the JSONs and indexing them into Milvus vector database \n")
index_path_for_rag(DOCS_LOCAL_DIR_NAME,
milvus_deployment_type="lite",
milvus_file_path="./milvus.db",
embedding_model="sentence-transformers/all-MiniLM-L12-v2",
milvus_deployment_type=DEFAULT_SETTINGS['milvus_deployment_type'],
milvus_file_path=DEFAULT_SETTINGS['milvus_file_path'],
embedding_model=DEFAULT_SETTINGS['retrieval_embedding_model'],
document_input_format='pdf' if TEST_PDF_TO_JSON_CONVERSION else 'json')

# execute a simple RAG query
print("Step 3: Executing simple RAG queries \n")
print("Question: How to install OpenShift CLI on macOS?")
result1 = execute_rag_query("How to install OpenShift CLI on macOS?",
milvus_file_path="./milvus.db",
embedding_model="sentence-transformers/all-MiniLM-L12-v2",
llm_base_url="http://vllm-service:8000/v1",
milvus_file_path=DEFAULT_SETTINGS['milvus_file_path'],
embedding_model=DEFAULT_SETTINGS['retrieval_embedding_model'],
llm_base_url=DEFAULT_SETTINGS['llm_base_url'],
top_k=3)
print("Response generated:")
print(f"\n{result1}")
print("\n")
print("Question: What are the two deployment options in OpenShift AI?")
result2 = execute_rag_query("What are the two deployment options in OpenShift AI?",
milvus_file_path="./milvus.db",
embedding_model="sentence-transformers/all-MiniLM-L12-v2",
llm_base_url="http://vllm-service:8000/v1",
milvus_file_path=DEFAULT_SETTINGS['milvus_file_path'],
embedding_model=DEFAULT_SETTINGS['retrieval_embedding_model'],
llm_base_url=DEFAULT_SETTINGS['llm_base_url'],
top_k=3)
print("Response generated:")
print(f"\n{result2}")
print("\n")
print("Question: What is OpenShift AI?")
result3 = execute_rag_query("What is OpenShift AI?",
milvus_file_path="./milvus.db",
embedding_model="sentence-transformers/all-MiniLM-L12-v2",
llm_base_url="http://vllm-service:8000/v1",
milvus_file_path=DEFAULT_SETTINGS['milvus_file_path'],
embedding_model=DEFAULT_SETTINGS['retrieval_embedding_model'],
llm_base_url=DEFAULT_SETTINGS['llm_base_url'],
top_k=3)
print("Response generated:")
print(f"\n{result3}")
Expand Down