# AI Provider Configuration

# AI_PROVIDER: Which provider to use
# Options: bedrock, openai, anthropic, google, azure, ollama, openrouter, deepseek
# Default: bedrock
AI_PROVIDER=bedrock

# AI_MODEL: The model ID for your chosen provider (REQUIRED)
AI_MODEL=global.anthropic.claude-sonnet-4-5-20250929-v1:0

# AWS Bedrock Configuration
# AWS_REGION=us-east-1
# AWS_ACCESS_KEY_ID=your-access-key-id
# AWS_SECRET_ACCESS_KEY=your-secret-access-key

# OpenAI Configuration
# OPENAI_API_KEY=sk-...
# OPENAI_BASE_URL=https://api.openai.com/v1  # Optional: Custom OpenAI-compatible endpoint
# OPENAI_ORGANIZATION=org-...  # Optional
# OPENAI_PROJECT=proj_...  # Optional

# Anthropic (Direct) Configuration
# ANTHROPIC_API_KEY=sk-ant-...
# ANTHROPIC_BASE_URL=https://your-custom-anthropic/v1

# Google Generative AI Configuration
# GOOGLE_GENERATIVE_AI_API_KEY=...
# GOOGLE_BASE_URL=https://generativelanguage.googleapis.com/v1beta  # Optional: Custom endpoint

# Azure OpenAI Configuration
# AZURE_RESOURCE_NAME=your-resource-name
# AZURE_API_KEY=...
# AZURE_BASE_URL=https://your-resource.openai.azure.com  # Optional: Custom endpoint (overrides resourceName)

# Ollama (Local) Configuration
# OLLAMA_BASE_URL=http://localhost:11434/api  # Optional, defaults to localhost

# OpenRouter Configuration
# OPENROUTER_API_KEY=sk-or-v1-...
# OPENROUTER_BASE_URL=https://openrouter.ai/api/v1  # Optional: Custom endpoint

# DeepSeek Configuration
# DEEPSEEK_API_KEY=sk-...
# DEEPSEEK_BASE_URL=https://api.deepseek.com/v1  # Optional: Custom endpoint

# Langfuse Observability (Optional)
# Enable LLM tracing and analytics - https://langfuse.com
# LANGFUSE_PUBLIC_KEY=pk-lf-...
# LANGFUSE_SECRET_KEY=sk-lf-...
# LANGFUSE_BASEURL=https://cloud.langfuse.com  # EU region, use https://us.cloud.langfuse.com for US
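
# Example AI_MODEL values by provider (illustrative only, not from this project's
# docs; model IDs change over time, so verify against your provider's current
# model catalog before use). Azure expects your deployment name rather than a
# raw model ID.
# AI_MODEL=gpt-4o                                # openai
# AI_MODEL=claude-3-5-sonnet-20241022            # anthropic
# AI_MODEL=gemini-1.5-flash                      # google
# AI_MODEL=your-azure-deployment-name            # azure
# AI_MODEL=llama3.1                              # ollama
# AI_MODEL=anthropic/claude-3.5-sonnet           # openrouter
# AI_MODEL=deepseek-chat                         # deepseek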