# LangSmith tracing
# Set this to `false` to disable tracing to LangSmith
LANGCHAIN_TRACING_V2=true
LANGCHAIN_API_KEY=
# LLM API keys
# Anthropic is used for reflection
ANTHROPIC_API_KEY=
# OpenAI is used for content generation
OPENAI_API_KEY=
# Optional, only required if using `Gemini 1.5 Flash` as the model.
GOOGLE_API_KEY=
# Optional, only required if using `Fireworks` as the model.
FIREWORKS_API_KEY=
# Feature flags for hiding/showing specific models
NEXT_PUBLIC_FIREWORKS_ENABLED=true
NEXT_PUBLIC_GEMINI_ENABLED=true
NEXT_PUBLIC_ANTHROPIC_ENABLED=true
NEXT_PUBLIC_OPENAI_ENABLED=true
# Set to false by default since the base OpenAI API is more common than the Azure OpenAI API.
NEXT_PUBLIC_AZURE_ENABLED=false
NEXT_PUBLIC_OLLAMA_ENABLED=false
# If using Ollama, set the API URL here. This only needs to be set if you are using a non-default Ollama server port.
# It will default to `http://host.docker.internal:11434` if not set.
# OLLAMA_API_URL="http://host.docker.internal:11434"
# The URL of your LangGraph deployment, or of a local development server via LangGraph Studio.
# If running locally, this URL should be set in the `constants.ts` file.
# LANGGRAPH_API_URL=
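# A hypothetical example format is shown below; the actual value depends on your deployment.
# LANGGRAPH_API_URL="https://your-langgraph-deployment-url"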
# Supabase for authentication
# Public keys
NEXT_PUBLIC_SUPABASE_URL=
NEXT_PUBLIC_SUPABASE_ANON_KEY=
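# Hypothetical example formats; copy the real values from your Supabase project's API settings.
# NEXT_PUBLIC_SUPABASE_URL="https://your-project-ref.supabase.co"
# NEXT_PUBLIC_SUPABASE_ANON_KEY="your-anon-key"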
# Azure OpenAI Configuration
# ENSURE THESE VARIABLES ARE PREFIXED WITH AN UNDERSCORE.
_AZURE_OPENAI_API_KEY=your-azure-openai-api-key
_AZURE_OPENAI_API_INSTANCE_NAME=your-instance-name
_AZURE_OPENAI_API_DEPLOYMENT_NAME=your-deployment-name
_AZURE_OPENAI_API_VERSION=2024-08-01-preview
# Optional: Azure OpenAI Base Path (if using a different domain)
# _AZURE_OPENAI_API_BASE_PATH=https://your-custom-domain.com/openai/deployments