Mirror of https://github.com/catlog22/Claude-Code-Workflow.git, synced 2026-02-05 01:50:27 +08:00
- Create ccw-litellm Python package with AbstractEmbedder and AbstractLLMClient interfaces
- Add BaseEmbedder abstraction and factory pattern to codex-lens for pluggable backends
- Implement API Settings dashboard page for provider credentials and custom endpoints
- Add REST API routes for CRUD operations on providers and endpoints
- Extend CLI with --model parameter for custom endpoint routing
- Integrate existing context-cache for @pattern file resolution
- Add provider model registry with predefined models per provider type
- Include i18n translations (en/zh) for all new UI elements

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
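The AbstractLLMClient and AbstractEmbedder interfaces named in the commit message are the seams that make the backends pluggable. A minimal, hypothetical sketch of what such interfaces could look like follows; the method names and signatures here are assumptions for illustration, not taken from the ccw-litellm source:

# Hypothetical sketch only; the real ccw-litellm abstractions may differ.
from abc import ABC, abstractmethod


class AbstractLLMClient(ABC):
    """Provider-agnostic completion client."""

    @abstractmethod
    def complete(self, prompt: str, model: str | None = None) -> str:
        """Return the model's text response for a single prompt."""


class AbstractEmbedder(ABC):
    """Provider-agnostic text embedder."""

    @abstractmethod
    def embed(self, texts: list[str]) -> list[list[float]]:
        """Return one embedding vector per input text."""

A factory keyed on the provider name in the config below would then return the matching concrete implementation.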
54 lines
1015 B
Plaintext
# LiteLLM Unified Configuration
# Copy to ~/.ccw/config/litellm-config.yaml

version: 1

# Default provider for LLM calls
default_provider: openai

# Provider configurations
providers:
  openai:
    api_key: ${OPENAI_API_KEY}
    api_base: https://api.openai.com/v1
  anthropic:
    api_key: ${ANTHROPIC_API_KEY}
  ollama:
    api_base: http://localhost:11434
  azure:
    api_key: ${AZURE_API_KEY}
    api_base: ${AZURE_API_BASE}

# LLM model configurations
llm_models:
  default:
    provider: openai
    model: gpt-4o
  fast:
    provider: openai
    model: gpt-4o-mini
  claude:
    provider: anthropic
    model: claude-sonnet-4-20250514
  local:
    provider: ollama
    model: llama3.2

# Embedding model configurations
embedding_models:
  default:
    provider: openai
    model: text-embedding-3-small
    dimensions: 1536
  large:
    provider: openai
    model: text-embedding-3-large
    dimensions: 3072
  ada:
    provider: openai
    model: text-embedding-ada-002
    dimensions: 1536
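A minimal sketch of how a caller might consume this file, assuming PyYAML and simple ${VAR} environment expansion; the helper names (load_config, resolve_llm) are illustrative, not the actual ccw-litellm API:

import os
import re
from pathlib import Path

import yaml

_ENV_REF = re.compile(r"\$\{([A-Z0-9_]+)\}")


def load_config(path: str = "~/.ccw/config/litellm-config.yaml") -> dict:
    raw = Path(path).expanduser().read_text()
    # Expand ${VAR} placeholders such as ${OPENAI_API_KEY} from the environment.
    expanded = _ENV_REF.sub(lambda m: os.environ.get(m.group(1), ""), raw)
    return yaml.safe_load(expanded)


def resolve_llm(config: dict, alias: str = "default") -> tuple[dict, str]:
    """Map an alias like 'fast' to its provider settings and model name."""
    entry = config["llm_models"][alias]
    provider = config["providers"][entry["provider"]]
    return provider, entry["model"]


if __name__ == "__main__":
    cfg = load_config()
    provider, model = resolve_llm(cfg, "fast")  # e.g. openai settings, gpt-4o-mini
    print(provider.get("api_base"), model)

Keeping credentials as ${ENV_VAR} references means the YAML itself never stores secrets; only the process environment does.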