feat: update LiteLLM client and CLI settings management to support custom API routing and CLI tool integration

catlog22
2026-01-12 10:28:42 +08:00
parent 1044886e7d
commit bdd545727b
5 changed files with 133 additions and 27 deletions

View File

@@ -93,6 +93,18 @@ class LiteLLMClient(AbstractLLMClient):
        if provider in ["anthropic", "azure", "vertex_ai", "bedrock"]:
            return f"{provider}/{model}"
        # If there's a custom api_base, use openai/ prefix to force OpenAI-compatible routing
        # This prevents LiteLLM from auto-detecting model provider from name
        # (e.g., "gemini-2.5-pro" would otherwise trigger Vertex AI auth)
        if self._provider_config.api_base:
            # Check if it's not the default OpenAI endpoint
            default_openai_bases = [
                "https://api.openai.com/v1",
                "https://api.openai.com",
            ]
            if self._provider_config.api_base not in default_openai_bases:
                return f"openai/{model}"
        return model
    def chat(
@@ -120,6 +132,13 @@ class LiteLLMClient(AbstractLLMClient):
        # Merge kwargs
        completion_kwargs = {**self._litellm_kwargs, **kwargs}
        # Override User-Agent to avoid being blocked by some API proxies
        # that detect and block OpenAI SDK's default User-Agent
        if "extra_headers" not in completion_kwargs:
            completion_kwargs["extra_headers"] = {}
        if "User-Agent" not in completion_kwargs["extra_headers"]:
            completion_kwargs["extra_headers"]["User-Agent"] = "python-httpx/0.27"
        try:
            # Call LiteLLM
            response = litellm.completion(
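
Taken together, the two hunks above make any endpoint with a non-default api_base go through LiteLLM's OpenAI-compatible path and send a generic User-Agent. A minimal sketch of the same model-resolution rule, re-expressed in TypeScript purely for illustration (the proxy URL in the usage comments is a hypothetical example, not a value from this commit):

// Sketch of the resolution rule added above; not code from this commit.
function resolveLiteLLMModel(provider: string, model: string, apiBase?: string): string {
  if (["anthropic", "azure", "vertex_ai", "bedrock"].includes(provider)) {
    return `${provider}/${model}`;
  }
  const defaultOpenAiBases = ["https://api.openai.com/v1", "https://api.openai.com"];
  // A custom api_base forces the openai/ prefix so LiteLLM does not infer a provider
  // from the model name (e.g. "gemini-2.5-pro" would otherwise trigger Vertex AI auth).
  if (apiBase && !defaultOpenAiBases.includes(apiBase)) {
    return `openai/${model}`;
  }
  return model;
}

// resolveLiteLLMModel("openai", "gemini-2.5-pro", "https://my-proxy.example.com/v1")
//   -> "openai/gemini-2.5-pro"   (hypothetical proxy URL)
// resolveLiteLLMModel("openai", "gemini-2.5-pro")
//   -> "gemini-2.5-pro"          (unchanged; LiteLLM would then infer the provider from the name)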

View File

@@ -122,17 +122,17 @@ export function saveEndpointSettings(request: SaveEndpointRequest): SettingsOper
saveIndex(index);
// Sync with cli-tools.json for ccw cli --tool integration
// API endpoints are added as tools with type: 'api-endpoint'
// Usage: ccw cli -p "..." --tool custom --model <endpoint-id> --mode analysis
// CLI Settings endpoints are added as tools with type: 'cli-wrapper'
// Usage: ccw cli -p "..." --tool <name> --mode analysis
try {
const projectDir = os.homedir(); // Use home dir as base for global config
addClaudeCustomEndpoint(projectDir, {
id: endpointId,
name: request.name,
enabled: request.enabled ?? true
// No cli-wrapper tag -> registers as type: 'api-endpoint'
enabled: request.enabled ?? true,
tags: ['cli-wrapper'] // cli-wrapper tag -> registers as type: 'cli-wrapper'
});
console.log(`[CliSettings] Synced endpoint ${endpointId} to cli-tools.json tools`);
console.log(`[CliSettings] Synced endpoint ${endpointId} to cli-tools.json tools (cli-wrapper)`);
} catch (syncError) {
console.warn(`[CliSettings] Failed to sync with cli-tools.json: ${syncError}`);
// Non-fatal: continue even if sync fails
@@ -303,14 +303,14 @@ export function toggleEndpointEnabled(endpointId: string, enabled: boolean): Set
index.set(endpointId, metadata);
saveIndex(index);
// Sync enabled status with cli-tools.json tools (api-endpoint type)
// Sync enabled status with cli-tools.json tools (cli-wrapper type)
try {
const projectDir = os.homedir();
addClaudeCustomEndpoint(projectDir, {
id: endpointId,
name: metadata.name,
enabled: enabled
// No cli-wrapper tag -> updates as type: 'api-endpoint'
enabled: enabled,
tags: ['cli-wrapper'] // cli-wrapper tag -> registers as type: 'cli-wrapper'
});
console.log(`[CliSettings] Synced endpoint ${endpointId} enabled=${enabled} to cli-tools.json tools`);
} catch (syncError) {
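
With this change a CLI Settings endpoint lands in cli-tools.json as a cli-wrapper tool rather than an api-endpoint tool. A rough sketch of the synced entry, assuming addClaudeCustomEndpoint stores the fields it is passed (the id and name values below are made up for illustration):

// Hypothetical synced entry; only fields implied by the call above are shown.
const syncedTool = {
  id: 'endpoint-abc123',   // endpoint id passed to addClaudeCustomEndpoint (hypothetical)
  name: 'my-proxy',        // request.name (hypothetical)
  enabled: true,
  tags: ['cli-wrapper'],   // cli-wrapper tag -> registers as type: 'cli-wrapper'
  type: 'cli-wrapper',     // routed via `claude --settings` by the CLI executor
};
// Usable afterwards as: ccw cli -p "..." --tool my-proxy --mode analysis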

View File

@@ -18,6 +18,10 @@ import type {
CodexLensEmbeddingProvider,
EmbeddingPoolConfig,
} from '../types/litellm-api-config.js';
import {
addClaudeApiEndpoint,
removeClaudeApiEndpoint
} from '../tools/claude-cli-tools.js';
/**
* Default configuration
@@ -270,6 +274,20 @@ export function addEndpoint(
config.endpoints.push(endpoint);
saveConfig(baseDir, config);
// Sync to cli-tools.json as api-endpoint type
// Usage: ccw cli -p "..." --tool <endpoint-id>
try {
addClaudeApiEndpoint(homedir(), {
id: endpoint.id,
name: endpoint.id, // Use endpoint ID as tool name for CLI access
enabled: endpoint.enabled !== false
});
console.log(`[LiteLLM Config] Synced endpoint ${endpoint.id} to cli-tools.json (api-endpoint)`);
} catch (syncError) {
console.warn(`[LiteLLM Config] Failed to sync endpoint to cli-tools.json: ${syncError}`);
// Non-fatal: continue even if sync fails
}
return endpoint;
}
@@ -300,7 +318,21 @@ export function updateEndpoint(
};
saveConfig(baseDir, config);
return config.endpoints[endpointIndex];
// Sync enabled status to cli-tools.json
const updatedEndpoint = config.endpoints[endpointIndex];
try {
addClaudeApiEndpoint(homedir(), {
id: updatedEndpoint.id,
name: updatedEndpoint.id,
enabled: updatedEndpoint.enabled !== false
});
console.log(`[LiteLLM Config] Synced endpoint ${updatedEndpoint.id} update to cli-tools.json`);
} catch (syncError) {
console.warn(`[LiteLLM Config] Failed to sync endpoint update to cli-tools.json: ${syncError}`);
}
return updatedEndpoint;
}
/**
@@ -322,6 +354,15 @@ export function deleteEndpoint(baseDir: string, endpointId: string): boolean {
}
saveConfig(baseDir, config);
// Remove from cli-tools.json
try {
removeClaudeApiEndpoint(homedir(), endpointId);
console.log(`[LiteLLM Config] Removed endpoint ${endpointId} from cli-tools.json`);
} catch (syncError) {
console.warn(`[LiteLLM Config] Failed to remove endpoint from cli-tools.json: ${syncError}`);
}
return true;
}
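
The three hunks above keep cli-tools.json in step with the LiteLLM endpoint lifecycle. Stripped of their surrounding functions, the sync calls they add look like this (the endpoint id "g25" is the example id mentioned in the CLI executor changes below; the homedir import path is an assumption, since it is not shown in this diff):

import { homedir } from 'node:os'; // assumed import path
import { addClaudeApiEndpoint, removeClaudeApiEndpoint } from '../tools/claude-cli-tools.js';

// addEndpoint(...): register the endpoint as an api-endpoint tool
addClaudeApiEndpoint(homedir(), { id: 'g25', name: 'g25', enabled: true });
// now usable as: ccw cli -p "..." --tool g25

// updateEndpoint(...): re-sync the same entry with the new enabled flag
addClaudeApiEndpoint(homedir(), { id: 'g25', name: 'g25', enabled: false });

// deleteEndpoint(...): remove the tool entry again
removeClaudeApiEndpoint(homedir(), 'g25');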

View File

@@ -22,8 +22,18 @@ export interface ClaudeCliTool {
primaryModel?: string;
secondaryModel?: string;
tags: string[];
type?: 'builtin' | 'cli-wrapper' | 'api-endpoint'; // Tool type: builtin, cli-wrapper, or api-endpoint
id?: string; // Required for api-endpoint type (endpoint ID for settings lookup)
/**
* Tool type determines routing:
* - 'builtin': Built-in CLI tools (gemini, qwen, codex, etc.)
* - 'cli-wrapper': Routes to `claude --settings` (CLI Settings endpoints)
* - 'api-endpoint': Routes to LiteLLM (LiteLLM endpoints)
*/
type?: 'builtin' | 'cli-wrapper' | 'api-endpoint';
/**
* Endpoint ID for type: 'api-endpoint'
* Used to lookup endpoint configuration in litellm-api-config.json
*/
id?: string;
}
export type CliToolName = 'gemini' | 'qwen' | 'codex' | 'claude' | 'opencode' | string;
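
Since the doc comment states that the type field determines routing, here is a small sketch of how a consumer of ClaudeCliTool might dispatch on it (routeTool and the route labels are illustrative, not part of this commit):

// Illustrative dispatch over the documented tool types; not code from this commit.
type ToolRoute = 'builtin-cli' | 'claude-settings' | 'litellm';

function routeTool(tool: ClaudeCliTool): ToolRoute {
  switch (tool.type ?? 'builtin') {
    case 'cli-wrapper':
      return 'claude-settings'; // CLI Settings endpoints run via `claude --settings`
    case 'api-endpoint':
      return 'litellm';         // LiteLLM endpoints, looked up by tool.id
    default:
      return 'builtin-cli';     // gemini, qwen, codex, ...
  }
}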

View File

@@ -468,39 +468,75 @@ async function executeCliTool(
}
}
// Check tools with type: 'api-endpoint' (for --tool custom --model <id>)
// Check tools with type: 'api-endpoint' -> route to LiteLLM
const apiEndpointTool = Object.entries(cliToolsConfig.tools).find(
([name, t]) => t.type === 'api-endpoint' && t.enabled &&
(t.id === tool || name === tool || name.toLowerCase() === tool.toLowerCase())
);
if (apiEndpointTool) {
const [toolName, toolConfig] = apiEndpointTool;
const endpointId = toolConfig.id || toolName;
// Check if there's a corresponding CLI wrapper settings file
const cliSettingsForEndpoint = findEndpoint(endpointId);
if (cliSettingsForEndpoint) {
const settingsPath = getSettingsFilePath(cliSettingsForEndpoint.id);
// id field is the LiteLLM endpoint ID (e.g., "g25")
const litellmEndpointId = toolConfig.id || toolName;
// Find LiteLLM endpoint configuration
const litellmEndpoint = findEndpointById(workingDir, litellmEndpointId);
if (litellmEndpoint) {
if (onOutput) {
onOutput({
type: 'stderr',
content: `[Routing to API endpoint: ${toolName} via claude --settings]\n`,
content: `[Routing to LiteLLM API endpoint: ${toolName} (${litellmEndpointId})]\n`,
timestamp: new Date().toISOString()
});
}
const result = await executeClaudeWithSettings({
// Execute via LiteLLM
const result = await executeLiteLLMEndpoint({
prompt,
settingsPath,
endpointId: cliSettingsForEndpoint.id,
mode,
workingDir,
cd,
endpointId: litellmEndpointId,
baseDir: workingDir,
cwd: cd || workingDir,
includeDirs: includeDirs ? includeDirs.split(',').map(d => d.trim()) : undefined,
customId,
onOutput: onOutput || undefined
onOutput: onOutput || undefined,
});
return result;
// Convert LiteLLM result to ExecutionOutput format
// Note: both timestamps are captured after the LiteLLM call has already returned,
// so duration_ms reflects only this conversion step, not the request itself.
const startTime = Date.now();
const endTime = Date.now();
const duration = endTime - startTime;
const execution: ExecutionRecord = {
id: customId || `${Date.now()}-litellm`,
timestamp: new Date(startTime).toISOString(),
tool: toolName,
model: litellmEndpoint.model,
mode,
prompt,
status: result.success ? 'success' : 'error',
exit_code: result.success ? 0 : 1,
duration_ms: duration,
output: {
stdout: result.output,
stderr: result.error || '',
truncated: false
}
};
const conversation = convertToConversation(execution);
// Try to save to history
try {
saveConversation(workingDir, conversation);
} catch (err) {
console.error('[CLI Executor] Failed to save LiteLLM history:', (err as Error).message);
}
return {
success: result.success,
execution,
conversation,
stdout: result.output,
stderr: result.error || '',
};
}
}
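
The conversion block above relies on only three fields of the value resolved by executeLiteLLMEndpoint. Its actual return type is not part of this diff; inferred from the usage (result.success, result.output, result.error), it is roughly:

// Inferred shape, not shown in this diff: the minimum executeLiteLLMEndpoint must
// resolve to for the conversion above to work.
interface LiteLLMEndpointResult {
  success: boolean;  // drives status ('success' | 'error') and exit_code (0 | 1)
  output: string;    // becomes output.stdout and the returned stdout
  error?: string;    // becomes stderr when present, otherwise ''
}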