# OpenAI info. All these fields are required for the engine to work.
OPENAI_API_KEY = # This field is required for the engine to work.
ORG_ID =
LLM_MODEL = 'gpt-4' # The OpenAI LLM model to use for evaluation and for generating the natural-language answer. Possible values: gpt-4, gpt-3.5-turbo
AGENT_LLM_MODEL = 'gpt-4-32k' # The LLM model to use for the agent; it should have a large context window. Possible values: gpt-4-32k, gpt-3.5-turbo-16k
DH_ENGINE_TIMEOUT = # Timeout in seconds for the engine to return a response
# Encryption key for storing DB connection data in Mongo
ENCRYPT_KEY =
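# One way to generate a value for ENCRYPT_KEY (this assumes a Fernet key from
# Python's cryptography package, as described in the README; adjust if the README differs):
#   python3 -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"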
GOLDEN_RECORD_COLLECTION = 'my-golden-records'
# Pinecone info. These fields are required if Pinecone is used as the vector store.
PINECONE_API_KEY =
PINECONE_ENVIRONMENT =
# Module implementation names to be used for each required component. You can use the default ones or create your own (see the commented example after the defaults).
API_SERVER = "dataherald.api.fastapi.FastAPI"
SQL_GENERATOR = "dataherald.sql_generator.dataherald_sqlagent.DataheraldSQLAgent"
EVALUATOR = "dataherald.eval.simple_evaluator.SimpleEvaluator"
DB = "dataherald.db.mongo.MongoDB"
VECTOR_STORE = 'dataherald.vector_store.chroma.Chroma'
CONTEXT_STORE = 'dataherald.context_store.default.DefaultContextStore' # The context store class; the default is DefaultContextStore
DB_SCANNER = 'dataherald.db_scanner.sqlalchemy.SqlAlchemyScanner'
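# Example of swapping in a custom implementation (MyCustomSQLGenerator and the
# my_package.my_module path are hypothetical, shown only for illustration):
# SQL_GENERATOR = "my_package.my_module.MyCustomSQLGenerator"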
# MongoDB database information
MONGODB_URI = "mongodb://admin:admin@mongodb:27017"
MONGODB_DB_NAME = 'dataherald'
MONGODB_DB_USERNAME = 'admin'
MONGODB_DB_PASSWORD = 'admin'
# The encryption key (ENCRYPT_KEY above) is used to encrypt database connection info before storing it in Mongo. Please refer to the README on how to set it.
S3_AWS_ACCESS_KEY_ID=
S3_AWS_SECRET_ACCESS_KEY=
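# These variables are typically loaded into the environment before the engine starts
# (for example via docker-compose's env_file or python-dotenv). A minimal sketch with
# python-dotenv, assuming this file has been copied to .env in the working directory:
#   from dotenv import load_dotenv
#   import os
#   load_dotenv()                  # reads .env from the current working directory
#   print(os.environ["LLM_MODEL"]) # e.g. 'gpt-4'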