Refactor CLI Config Manager and Add Provider Model Routes

- Removed the deprecated PREDEFINED_MODELS and DEFAULT_CONFIG constants and the getPredefinedModels() wrapper from cli-config-manager.ts.
- Added LiteLLM provider model presets in litellm-provider-models.ts (content previously in provider-models.ts) and rewrote provider-models.ts as a read-only CLI tool model reference.
- Created provider-routes.ts to serve the provider reference API endpoints (GET /api/providers and GET /api/providers/:provider/models); a usage sketch follows below.
- Added integration tests for the provider routes to verify correct behavior and response structure.
- Implemented unit tests for the settings persistence functions, covering common scenarios and edge cases.
- Strengthened error handling and validation in the new routes and settings functions.
catlog22
2026-01-25 17:27:58 +08:00
parent 7c16cc6427
commit 985085c624
13 changed files with 1252 additions and 300 deletions
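A minimal client-side sketch of the two read-only endpoints added in provider-routes.ts (GET /api/providers and GET /api/providers/:provider/models). The paths and response fields (success, providers, provider, providerName, models, error) follow the route handler in the diff below; the base URL, helper names, and TypeScript interfaces are illustrative assumptions, not part of this commit.

// Sketch of consuming the new provider reference endpoints (assumes Node 18+ or a browser for fetch).
interface ProviderSummary {
  id: string;
  name: string;
  modelCount: number;
}

interface ProviderModelInfo {
  id: string;
  name: string;
  capabilities?: string[];
  contextWindow?: number;
  deprecated?: boolean;
}

const BASE_URL = 'http://localhost:3000'; // assumed dev server address

async function listProviders(): Promise<ProviderSummary[]> {
  const res = await fetch(`${BASE_URL}/api/providers`);
  const body = await res.json() as { success: boolean; providers?: ProviderSummary[]; error?: string };
  if (!body.success) throw new Error(body.error ?? 'Failed to list providers');
  return body.providers ?? [];
}

async function listModels(provider: string): Promise<ProviderModelInfo[]> {
  const res = await fetch(`${BASE_URL}/api/providers/${encodeURIComponent(provider)}/models`);
  if (res.status === 404) return []; // unknown provider
  const body = await res.json() as { success: boolean; models?: ProviderModelInfo[]; error?: string };
  if (!body.success) throw new Error(body.error ?? `Failed to list models for ${provider}`);
  return body.models ?? [];
}

// Usage: populate a model dropdown for the 'anthropic' provider.
listModels('anthropic').then(models => {
  for (const m of models) console.log(`${m.id}: ${m.name}`);
});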

View File

@@ -0,0 +1,222 @@
/**
* Provider Model Presets
*
* Predefined model information for each supported LLM provider.
* Used for UI dropdowns and validation.
*/
import type { ProviderType } from '../types/litellm-api-config.js';
/**
* Model information metadata
*/
export interface ModelInfo {
/** Model identifier (used in API calls) */
id: string;
/** Human-readable display name */
name: string;
/** Context window size in tokens */
contextWindow: number;
/** Whether this model supports prompt caching */
supportsCaching: boolean;
}
/**
* Embedding model information metadata
*/
export interface EmbeddingModelInfo {
/** Model identifier (used in API calls) */
id: string;
/** Human-readable display name */
name: string;
/** Embedding dimensions */
dimensions: number;
/** Maximum input tokens */
maxTokens: number;
/** Provider identifier */
provider: string;
}
/**
* Predefined models for each API format
* Used for UI selection and validation
* Note: Most providers use OpenAI-compatible format
*/
export const PROVIDER_MODELS: Record<ProviderType, ModelInfo[]> = {
// OpenAI-compatible format (used by OpenAI, DeepSeek, Ollama, etc.)
openai: [
{
id: 'gpt-4o',
name: 'GPT-4o',
contextWindow: 128000,
supportsCaching: true
},
{
id: 'gpt-4o-mini',
name: 'GPT-4o Mini',
contextWindow: 128000,
supportsCaching: true
},
{
id: 'o1',
name: 'O1',
contextWindow: 200000,
supportsCaching: true
},
{
id: 'deepseek-chat',
name: 'DeepSeek Chat',
contextWindow: 64000,
supportsCaching: false
},
{
id: 'deepseek-coder',
name: 'DeepSeek Coder',
contextWindow: 64000,
supportsCaching: false
},
{
id: 'llama3.2',
name: 'Llama 3.2',
contextWindow: 128000,
supportsCaching: false
},
{
id: 'qwen2.5-coder',
name: 'Qwen 2.5 Coder',
contextWindow: 32000,
supportsCaching: false
}
],
// Anthropic format
anthropic: [
{
id: 'claude-sonnet-4-20250514',
name: 'Claude Sonnet 4',
contextWindow: 200000,
supportsCaching: true
},
{
id: 'claude-3-5-sonnet-20241022',
name: 'Claude 3.5 Sonnet',
contextWindow: 200000,
supportsCaching: true
},
{
id: 'claude-3-5-haiku-20241022',
name: 'Claude 3.5 Haiku',
contextWindow: 200000,
supportsCaching: true
},
{
id: 'claude-3-opus-20240229',
name: 'Claude 3 Opus',
contextWindow: 200000,
supportsCaching: false
}
],
// Custom format
custom: [
{
id: 'custom-model',
name: 'Custom Model',
contextWindow: 128000,
supportsCaching: false
}
]
};
/**
* Get models for a specific provider
* @param providerType - Provider type to get models for
* @returns Array of model information
*/
export function getModelsForProvider(providerType: ProviderType): ModelInfo[] {
return PROVIDER_MODELS[providerType] || [];
}
/**
* Predefined embedding models for each API format
* Used for UI selection and validation
*/
export const EMBEDDING_MODELS: Record<ProviderType, EmbeddingModelInfo[]> = {
// OpenAI embedding models
openai: [
{
id: 'text-embedding-3-small',
name: 'Text Embedding 3 Small',
dimensions: 1536,
maxTokens: 8191,
provider: 'openai'
},
{
id: 'text-embedding-3-large',
name: 'Text Embedding 3 Large',
dimensions: 3072,
maxTokens: 8191,
provider: 'openai'
},
{
id: 'text-embedding-ada-002',
name: 'Ada 002',
dimensions: 1536,
maxTokens: 8191,
provider: 'openai'
}
],
// Anthropic doesn't have embedding models
anthropic: [],
// Custom embedding models
custom: [
{
id: 'custom-embedding',
name: 'Custom Embedding',
dimensions: 1536,
maxTokens: 8192,
provider: 'custom'
}
]
};
/**
* Get embedding models for a specific provider
* @param providerType - Provider type to get embedding models for
* @returns Array of embedding model information
*/
export function getEmbeddingModelsForProvider(providerType: ProviderType): EmbeddingModelInfo[] {
return EMBEDDING_MODELS[providerType] || [];
}
/**
* Get model information by ID within a provider
* @param providerType - Provider type
* @param modelId - Model identifier
* @returns Model information or undefined if not found
*/
export function getModelInfo(providerType: ProviderType, modelId: string): ModelInfo | undefined {
const models = PROVIDER_MODELS[providerType] || [];
return models.find(m => m.id === modelId);
}
/**
* Validate if a model ID is supported by a provider
* @param providerType - Provider type
* @param modelId - Model identifier to validate
* @returns true if model is valid for provider
*/
export function isValidModel(providerType: ProviderType, modelId: string): boolean {
return getModelInfo(providerType, modelId) !== undefined;
}
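A short usage sketch for the presets above, assuming the file is imported as ./litellm-provider-models.js and that ProviderType includes 'openai'; the dropdown mapping and the checked model ID are illustrative only.

// Sketch: drive a UI model dropdown from the presets and validate a selection.
import { getModelsForProvider, isValidModel } from './litellm-provider-models.js';

// Build dropdown options with a context-window hint in the label.
const options = getModelsForProvider('openai').map(m => ({
  value: m.id,
  label: `${m.name} (${Math.round(m.contextWindow / 1000)}k ctx)`
}));

// Reject model IDs the provider does not list.
if (!isValidModel('openai', 'gpt-4o-mini')) {
  throw new Error('Unsupported model for provider "openai"');
}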

View File

@@ -1,222 +1,123 @@
/**
* Provider Model Presets
* CLI Tool Model Reference Library
*
* Predefined model information for each supported LLM provider.
* Used for UI dropdowns and validation.
* System reference for available models per CLI tool provider.
* This is a read-only reference, NOT user configuration.
* User configuration is managed via tools.{tool}.primaryModel/secondaryModel in cli-tools.json
*/
import type { ProviderType } from '../types/litellm-api-config.js';
/**
* Model information metadata
*/
export interface ModelInfo {
/** Model identifier (used in API calls) */
export interface ProviderModelInfo {
id: string;
/** Human-readable display name */
name: string;
capabilities?: string[];
contextWindow?: number;
deprecated?: boolean;
}
/** Context window size in tokens */
contextWindow: number;
/** Whether this model supports prompt caching */
supportsCaching: boolean;
export interface ProviderInfo {
name: string;
models: ProviderModelInfo[];
}
/**
* Embedding model information metadata
* System reference for CLI tool models
* Maps provider names to their available models
*/
export interface EmbeddingModelInfo {
/** Model identifier (used in API calls) */
id: string;
/** Human-readable display name */
name: string;
/** Embedding dimensions */
dimensions: number;
/** Maximum input tokens */
maxTokens: number;
/** Provider identifier */
provider: string;
}
/**
* Predefined models for each API format
* Used for UI selection and validation
* Note: Most providers use OpenAI-compatible format
*/
export const PROVIDER_MODELS: Record<ProviderType, ModelInfo[]> = {
// OpenAI-compatible format (used by OpenAI, DeepSeek, Ollama, etc.)
openai: [
{
id: 'gpt-4o',
name: 'GPT-4o',
contextWindow: 128000,
supportsCaching: true
},
{
id: 'gpt-4o-mini',
name: 'GPT-4o Mini',
contextWindow: 128000,
supportsCaching: true
},
{
id: 'o1',
name: 'O1',
contextWindow: 200000,
supportsCaching: true
},
{
id: 'deepseek-chat',
name: 'DeepSeek Chat',
contextWindow: 64000,
supportsCaching: false
},
{
id: 'deepseek-coder',
name: 'DeepSeek Coder',
contextWindow: 64000,
supportsCaching: false
},
{
id: 'llama3.2',
name: 'Llama 3.2',
contextWindow: 128000,
supportsCaching: false
},
{
id: 'qwen2.5-coder',
name: 'Qwen 2.5 Coder',
contextWindow: 32000,
supportsCaching: false
}
],
// Anthropic format
anthropic: [
{
id: 'claude-sonnet-4-20250514',
name: 'Claude Sonnet 4',
contextWindow: 200000,
supportsCaching: true
},
{
id: 'claude-3-5-sonnet-20241022',
name: 'Claude 3.5 Sonnet',
contextWindow: 200000,
supportsCaching: true
},
{
id: 'claude-3-5-haiku-20241022',
name: 'Claude 3.5 Haiku',
contextWindow: 200000,
supportsCaching: true
},
{
id: 'claude-3-opus-20240229',
name: 'Claude 3 Opus',
contextWindow: 200000,
supportsCaching: false
}
],
// Custom format
custom: [
{
id: 'custom-model',
name: 'Custom Model',
contextWindow: 128000,
supportsCaching: false
}
]
};
export const PROVIDER_MODELS: Record<string, ProviderInfo> = {
google: {
name: 'Google AI',
models: [
{ id: 'gemini-2.5-pro', name: 'Gemini 2.5 Pro', capabilities: ['text', 'vision', 'code'], contextWindow: 1000000 },
{ id: 'gemini-2.5-flash', name: 'Gemini 2.5 Flash', capabilities: ['text', 'code'], contextWindow: 1000000 },
{ id: 'gemini-2.0-flash', name: 'Gemini 2.0 Flash', capabilities: ['text'], contextWindow: 1000000 },
{ id: 'gemini-1.5-pro', name: 'Gemini 1.5 Pro', capabilities: ['text', 'vision'], contextWindow: 2000000 },
{ id: 'gemini-1.5-flash', name: 'Gemini 1.5 Flash', capabilities: ['text'], contextWindow: 1000000 }
]
},
qwen: {
name: 'Qwen',
models: [
{ id: 'coder-model', name: 'Qwen Coder', capabilities: ['code'] },
{ id: 'vision-model', name: 'Qwen Vision', capabilities: ['vision'] },
{ id: 'qwen2.5-coder-32b', name: 'Qwen 2.5 Coder 32B', capabilities: ['code'] }
]
},
openai: {
name: 'OpenAI',
models: [
{ id: 'gpt-5.2', name: 'GPT-5.2', capabilities: ['text', 'code'] },
{ id: 'gpt-4.1', name: 'GPT-4.1', capabilities: ['text', 'code'] },
{ id: 'o4-mini', name: 'O4 Mini', capabilities: ['text'] },
{ id: 'o3', name: 'O3', capabilities: ['text'] }
]
},
anthropic: {
name: 'Anthropic',
models: [
{ id: 'sonnet', name: 'Claude Sonnet', capabilities: ['text', 'code'] },
{ id: 'opus', name: 'Claude Opus', capabilities: ['text', 'code', 'vision'] },
{ id: 'haiku', name: 'Claude Haiku', capabilities: ['text'] },
{ id: 'claude-sonnet-4-5-20250929', name: 'Claude 4.5 Sonnet (2025-09-29)', capabilities: ['text', 'code'] },
{ id: 'claude-opus-4-5-20251101', name: 'Claude 4.5 Opus (2025-11-01)', capabilities: ['text', 'code', 'vision'] }
]
},
litellm: {
name: 'LiteLLM Aggregator',
models: [
{ id: 'opencode/glm-4.7-free', name: 'GLM-4.7 Free', capabilities: ['text'] },
{ id: 'opencode/gpt-5-nano', name: 'GPT-5 Nano', capabilities: ['text'] },
{ id: 'opencode/grok-code', name: 'Grok Code', capabilities: ['code'] },
{ id: 'opencode/minimax-m2.1-free', name: 'MiniMax M2.1 Free', capabilities: ['text'] },
{ id: 'anthropic/claude-sonnet-4-20250514', name: 'Claude Sonnet 4 (via LiteLLM)', capabilities: ['text'] },
{ id: 'anthropic/claude-opus-4-20250514', name: 'Claude Opus 4 (via LiteLLM)', capabilities: ['text'] },
{ id: 'openai/gpt-4.1', name: 'GPT-4.1 (via LiteLLM)', capabilities: ['text'] },
{ id: 'openai/o3', name: 'O3 (via LiteLLM)', capabilities: ['text'] },
{ id: 'google/gemini-2.5-pro', name: 'Gemini 2.5 Pro (via LiteLLM)', capabilities: ['text'] },
{ id: 'google/gemini-2.5-flash', name: 'Gemini 2.5 Flash (via LiteLLM)', capabilities: ['text'] }
]
}
} as const;
/**
* Get models for a specific provider
* @param providerType - Provider type to get models for
* @param provider - Provider name (e.g., 'google', 'qwen', 'openai', 'anthropic', 'litellm')
* @returns Array of model information
*/
export function getModelsForProvider(providerType: ProviderType): ModelInfo[] {
return PROVIDER_MODELS[providerType] || [];
export function getProviderModels(provider: string): ProviderModelInfo[] {
return PROVIDER_MODELS[provider]?.models || [];
}
/**
* Predefined embedding models for each API format
* Used for UI selection and validation
* Get all provider names
* @returns Array of provider names
*/
export const EMBEDDING_MODELS: Record<ProviderType, EmbeddingModelInfo[]> = {
// OpenAI embedding models
openai: [
{
id: 'text-embedding-3-small',
name: 'Text Embedding 3 Small',
dimensions: 1536,
maxTokens: 8191,
provider: 'openai'
},
{
id: 'text-embedding-3-large',
name: 'Text Embedding 3 Large',
dimensions: 3072,
maxTokens: 8191,
provider: 'openai'
},
{
id: 'text-embedding-ada-002',
name: 'Ada 002',
dimensions: 1536,
maxTokens: 8191,
provider: 'openai'
}
],
// Anthropic doesn't have embedding models
anthropic: [],
// Custom embedding models
custom: [
{
id: 'custom-embedding',
name: 'Custom Embedding',
dimensions: 1536,
maxTokens: 8192,
provider: 'custom'
}
]
};
/**
* Get embedding models for a specific provider
* @param providerType - Provider type to get embedding models for
* @returns Array of embedding model information
*/
export function getEmbeddingModelsForProvider(providerType: ProviderType): EmbeddingModelInfo[] {
return EMBEDDING_MODELS[providerType] || [];
export function getAllProviders(): string[] {
return Object.keys(PROVIDER_MODELS);
}
/**
* Get model information by ID within a provider
* @param providerType - Provider type
* @param modelId - Model identifier
* Find model information across all providers
* @param modelId - Model identifier to search for
* @returns Model information or undefined if not found
*/
export function getModelInfo(providerType: ProviderType, modelId: string): ModelInfo | undefined {
const models = PROVIDER_MODELS[providerType] || [];
return models.find(m => m.id === modelId);
export function findModelInfo(modelId: string): ProviderModelInfo | undefined {
for (const provider of Object.values(PROVIDER_MODELS)) {
const model = provider.models.find(m => m.id === modelId);
if (model) return model;
}
return undefined;
}
/**
* Validate if a model ID is supported by a provider
* @param providerType - Provider type
* @param modelId - Model identifier to validate
* @returns true if model is valid for provider
* Get provider name for a model ID
* @param modelId - Model identifier
* @returns Provider name or undefined if not found
*/
export function isValidModel(providerType: ProviderType, modelId: string): boolean {
return getModelInfo(providerType, modelId) !== undefined;
export function getProviderForModel(modelId: string): string | undefined {
for (const [providerId, provider] of Object.entries(PROVIDER_MODELS)) {
if (provider.models.some(m => m.id === modelId)) {
return providerId;
}
}
return undefined;
}
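A brief sketch of resolving a user-configured model ID against this system reference. findModelInfo and getProviderForModel are the functions defined above; the import path and the example model ID are assumptions for illustration.

// Sketch: look up a model from cli-tools.json (e.g. tools.gemini.primaryModel) in the reference.
import { findModelInfo, getProviderForModel } from './provider-models.js';

const modelId = 'gemini-2.5-pro'; // assumed user-configured primary model
const info = findModelInfo(modelId);
const provider = getProviderForModel(modelId);

if (info && provider) {
  console.log(`${modelId} -> provider=${provider}, contextWindow=${info.contextWindow ?? 'unknown'}`);
} else {
  console.warn(`Model ${modelId} is not in the system reference; treating it as a custom entry.`);
}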

View File

@@ -8,7 +8,7 @@
import { homedir } from 'os';
import { join, resolve, dirname, relative, sep } from 'path';
import { createHash } from 'crypto';
import { existsSync, mkdirSync, renameSync, rmSync, readdirSync } from 'fs';
import { existsSync, mkdirSync, renameSync, rmSync, readdirSync, cpSync } from 'fs';
import { readdir } from 'fs/promises';
// Environment variable override for custom storage location
@@ -211,14 +211,29 @@ function migrateToHierarchical(legacyDir: string, targetDir: string): void {
const target = join(targetDir, subDir);
if (existsSync(source)) {
// Use atomic rename (same filesystem)
// Try atomic rename first (fastest, same filesystem)
try {
renameSync(source, target);
console.log(` ✓ Migrated ${subDir}`);
} catch (error: any) {
// If rename fails (cross-filesystem), fallback to copy-delete
// For now, we'll just throw the error
throw new Error(`Failed to migrate ${subDir}: ${error.message}`);
// If rename fails (EPERM, cross-filesystem, etc.), fallback to copy-delete
if (error.code === 'EPERM' || error.code === 'EXDEV' || error.code === 'EBUSY') {
try {
console.log(` ⚠️ rename failed, falling back to copy-delete for ${subDir}...`);
cpSync(source, target, { recursive: true, force: true });
// Verify copy succeeded before deleting source
if (existsSync(target)) {
rmSync(source, { recursive: true, force: true });
console.log(` ✓ Migrated ${subDir} (copy-delete)`);
} else {
throw new Error('Copy failed: target directory does not exist');
}
} catch (copyError: any) {
throw new Error(`Failed to migrate ${subDir}: ${copyError.message}`);
}
} else {
throw new Error(`Failed to migrate ${subDir}: ${error.message}`);
}
}
}
}

View File

@@ -29,8 +29,7 @@ import {
loadCliConfig,
getToolConfig,
updateToolConfig,
getFullConfigResponse,
PREDEFINED_MODELS
getFullConfigResponse
} from '../../tools/cli-config-manager.js';
import {
loadClaudeCliTools,

View File

@@ -0,0 +1,78 @@
/**
* Provider Reference Routes Module
* Handles read-only provider model reference API endpoints
*/
import type { RouteContext } from './types.js';
import {
PROVIDER_MODELS,
getAllProviders,
getProviderModels
} from '../../config/provider-models.js';
/**
* Handle Provider Reference routes
* @returns true if route was handled, false otherwise
*/
export async function handleProviderRoutes(ctx: RouteContext): Promise<boolean> {
const { pathname, req, res } = ctx;
// ========== GET ALL PROVIDERS ==========
// GET /api/providers
if (pathname === '/api/providers' && req.method === 'GET') {
try {
const providers = getAllProviders().map(id => ({
id,
name: PROVIDER_MODELS[id].name,
modelCount: PROVIDER_MODELS[id].models.length
}));
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ success: true, providers }));
} catch (err) {
res.writeHead(500, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({
success: false,
error: (err as Error).message
}));
}
return true;
}
// ========== GET MODELS FOR PROVIDER ==========
// GET /api/providers/:provider/models
const providerMatch = pathname.match(/^\/api\/providers\/([^\/]+)\/models$/);
if (providerMatch && req.method === 'GET') {
const provider = decodeURIComponent(providerMatch[1]);
try {
const models = getProviderModels(provider);
if (models.length === 0) {
res.writeHead(404, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({
success: false,
error: `Provider not found: ${provider}`
}));
return true;
}
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({
success: true,
provider,
providerName: PROVIDER_MODELS[provider].name,
models
}));
} catch (err) {
res.writeHead(500, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({
success: false,
error: (err as Error).message
}));
}
return true;
}
return false;
}

View File

@@ -8,6 +8,7 @@ import { resolvePath, getRecentPaths, normalizePathForDisplay } from '../utils/p
import { handleStatusRoutes } from './routes/status-routes.js';
import { handleCliRoutes, cleanupStaleExecutions } from './routes/cli-routes.js';
import { handleCliSettingsRoutes } from './routes/cli-settings-routes.js';
import { handleProviderRoutes } from './routes/provider-routes.js';
import { handleMemoryRoutes } from './routes/memory-routes.js';
import { handleCoreMemoryRoutes } from './routes/core-memory-routes.js';
import { handleMcpRoutes } from './routes/mcp-routes.js';
@@ -518,6 +519,11 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
if (await handleCliRoutes(routeContext)) return;
}
// Provider routes (/api/providers/*)
if (pathname.startsWith('/api/providers')) {
if (await handleProviderRoutes(routeContext)) return;
}
// Claude CLAUDE.md routes (/api/memory/claude/*) and Language routes (/api/language/*)
if (pathname.startsWith('/api/memory/claude/') || pathname.startsWith('/api/language/')) {
if (await handleClaudeRoutes(routeContext)) return;

View File

@@ -80,7 +80,6 @@ export interface ClaudeCacheSettings {
export interface ClaudeCliToolsConfig {
$schema?: string;
version: string;
models?: Record<string, string[]>; // PREDEFINED_MODELS
tools: Record<string, ClaudeCliTool>; // All tools: builtin, cli-wrapper, api-endpoint
apiEndpoints?: ClaudeApiEndpoint[]; // @deprecated Use tools with type: 'api-endpoint' instead
customEndpoints?: ClaudeCustomEndpoint[]; // @deprecated Use tools with type: 'cli-wrapper' or 'api-endpoint' instead
@@ -100,6 +99,8 @@ export interface ClaudeCliSettingsConfig {
recursiveQuery: boolean;
cache: ClaudeCacheSettings;
codeIndexMcp: 'codexlens' | 'ace' | 'none';
defaultModel?: string;
autoSyncEnabled?: boolean;
}
// Legacy combined config (for backward compatibility)
@@ -120,29 +121,8 @@ export interface ClaudeCliCombinedConfig extends ClaudeCliToolsConfig {
// ========== Default Config ==========
// Predefined models for each tool
const PREDEFINED_MODELS: Record<CliToolName, string[]> = {
gemini: ['gemini-2.5-pro', 'gemini-2.5-flash', 'gemini-2.0-flash', 'gemini-1.5-pro', 'gemini-1.5-flash'],
qwen: ['coder-model', 'vision-model', 'qwen2.5-coder-32b'],
codex: ['gpt-5.2', 'gpt-4.1', 'o4-mini', 'o3'],
claude: ['sonnet', 'opus', 'haiku', 'claude-sonnet-4-5-20250929', 'claude-opus-4-5-20251101'],
opencode: [
'opencode/glm-4.7-free',
'opencode/gpt-5-nano',
'opencode/grok-code',
'opencode/minimax-m2.1-free',
'anthropic/claude-sonnet-4-20250514',
'anthropic/claude-opus-4-20250514',
'openai/gpt-4.1',
'openai/o3',
'google/gemini-2.5-pro',
'google/gemini-2.5-flash'
]
};
const DEFAULT_TOOLS_CONFIG: ClaudeCliToolsConfig = {
version: '3.2.0',
models: { ...PREDEFINED_MODELS },
version: '3.3.0',
tools: {
gemini: {
enabled: true,
@@ -260,6 +240,28 @@ function resolveSettingsPath(projectDir: string): { path: string; source: 'proje
// ========== Main Functions ==========
/**
* Create a timestamped backup of the config file
* @param filePath - Path to the config file to backup
* @returns Path to the backup file
*/
function backupConfigFile(filePath: string): string {
const timestamp = new Date().toISOString().replace(/[:.]/g, '-').split('T')[0] + '-' +
new Date().toISOString().replace(/[:.]/g, '-').split('T')[1].substring(0, 8);
const backupPath = `${filePath}.${timestamp}.bak`;
try {
if (fs.existsSync(filePath)) {
fs.copyFileSync(filePath, backupPath);
debugLog(`[claude-cli-tools] Created backup: ${backupPath}`);
}
return backupPath;
} catch (err) {
console.warn('[claude-cli-tools] Failed to create backup:', err);
return '';
}
}
/**
* Ensure tool has required fields (for backward compatibility)
*/
@@ -274,18 +276,31 @@ function ensureToolTags(tool: Partial<ClaudeCliTool>): ClaudeCliTool {
}
/**
* Migrate config from older versions to v3.2.0
* Migrate config from older versions to v3.3.0
* v3.2.0: All endpoints (cli-wrapper, api-endpoint) are in tools with type field
* v3.3.0: Remove models field (moved to system reference)
*/
function migrateConfig(config: any, projectDir: string): ClaudeCliToolsConfig {
function migrateConfig(config: any, projectDir: string, configPath?: string): ClaudeCliToolsConfig {
const version = parseFloat(config.version || '1.0');
let needsMigration = false;
// Already v3.2+, no migration needed
if (version >= 3.2) {
// Check if models field exists (v3.3.0 migration)
if (config.models) {
needsMigration = true;
debugLog('[claude-cli-tools] Detected models field, will remove (moved to system reference)');
}
// Already v3.3+, no migration needed
if (version >= 3.3 && !needsMigration) {
return config as ClaudeCliToolsConfig;
}
debugLog(`[claude-cli-tools] Migrating config from v${config.version || '1.0'} to v3.2.0`);
// Create backup before migration if config path is provided
if (configPath && (version < 3.3 || needsMigration)) {
backupConfigFile(configPath);
}
debugLog(`[claude-cli-tools] Migrating config from v${config.version || '1.0'} to v3.3.0`);
// Try to load legacy cli-config.json for model data
let legacyCliConfig: any = null;
@@ -372,9 +387,13 @@ function migrateConfig(config: any, projectDir: string): ClaudeCliToolsConfig {
}
}
// Remove models field if it exists (v3.3.0 migration)
if (config.models) {
debugLog('[claude-cli-tools] Removed models field (moved to system reference)');
}
return {
version: '3.2.0',
models: { ...PREDEFINED_MODELS },
version: '3.3.0',
tools: migratedTools,
$schema: config.$schema
};
@@ -485,9 +504,8 @@ export function loadClaudeCliTools(projectDir: string): ClaudeCliToolsConfig & {
const content = fs.readFileSync(resolved.path, 'utf-8');
const parsed = JSON.parse(content) as Partial<ClaudeCliCombinedConfig>;
// Migrate older versions to v3.2.0
const migrated = migrateConfig(parsed, projectDir);
const needsSave = migrated.version !== parsed.version;
// Migrate older versions to v3.3.0 (pass config path for backup)
const migrated = migrateConfig(parsed, projectDir, resolved.path);
// Load user-configured tools only (defaults NOT merged)
const mergedTools: Record<string, ClaudeCliTool> = {};
@@ -501,14 +519,15 @@ export function loadClaudeCliTools(projectDir: string): ClaudeCliToolsConfig & {
const config: ClaudeCliToolsConfig & { _source?: string } = {
version: migrated.version || DEFAULT_TOOLS_CONFIG.version,
models: migrated.models || DEFAULT_TOOLS_CONFIG.models,
tools: mergedTools,
$schema: migrated.$schema,
_source: resolved.source
};
// Save migrated config if version changed
if (needsSave) {
// Save migrated config if version changed or models field exists
const needsVersionUpdate = migrated.version !== (parsed as any).version;
const hasModelsField = (parsed as any).models !== undefined;
if (needsVersionUpdate || hasModelsField) {
try {
saveClaudeCliTools(projectDir, config);
debugLog(`[claude-cli-tools] Saved migrated config to: ${resolved.path}`);
@@ -674,6 +693,161 @@ export function getDefaultTool(projectDir: string): string {
}
}
// ========== Settings Persistence Functions ==========
/**
* Update prompt format setting
* @param projectDir - Project directory path
* @param format - Prompt format: 'plain' | 'yaml' | 'json'
* @returns Updated settings config
*/
export function setPromptFormat(
projectDir: string,
format: 'plain' | 'yaml' | 'json'
): ClaudeCliSettingsConfig {
const settings = loadClaudeCliSettings(projectDir);
settings.promptFormat = format;
saveClaudeCliSettings(projectDir, settings);
return settings;
}
/**
* Get prompt format setting
* @param projectDir - Project directory path
* @returns Current prompt format or 'plain' as fallback
*/
export function getPromptFormat(projectDir: string): 'plain' | 'yaml' | 'json' {
try {
const settings = loadClaudeCliSettings(projectDir);
return settings.promptFormat || 'plain';
} catch {
return 'plain';
}
}
/**
* Update default model setting
* @param projectDir - Project directory path
* @param model - Default model name
* @returns Updated settings config
*/
export function setDefaultModel(
projectDir: string,
model: string
): ClaudeCliSettingsConfig {
const settings = loadClaudeCliSettings(projectDir);
settings.defaultModel = model;
saveClaudeCliSettings(projectDir, settings);
return settings;
}
/**
* Get default model setting
* @param projectDir - Project directory path
* @returns Current default model or undefined if not set
*/
export function getDefaultModel(projectDir: string): string | undefined {
try {
const settings = loadClaudeCliSettings(projectDir);
return settings.defaultModel;
} catch {
return undefined;
}
}
/**
* Update auto-sync enabled setting
* @param projectDir - Project directory path
* @param enabled - Whether auto-sync is enabled
* @returns Updated settings config
*/
export function setAutoSyncEnabled(
projectDir: string,
enabled: boolean
): ClaudeCliSettingsConfig {
const settings = loadClaudeCliSettings(projectDir);
settings.autoSyncEnabled = enabled;
saveClaudeCliSettings(projectDir, settings);
return settings;
}
/**
* Get auto-sync enabled setting
* @param projectDir - Project directory path
* @returns Current auto-sync status or undefined if not set
*/
export function getAutoSyncEnabled(projectDir: string): boolean | undefined {
try {
const settings = loadClaudeCliSettings(projectDir);
return settings.autoSyncEnabled;
} catch {
return undefined;
}
}
/**
* Update smart context enabled setting
* @param projectDir - Project directory path
* @param enabled - Whether smart context is enabled
* @returns Updated settings config
*/
export function setSmartContextEnabled(
projectDir: string,
enabled: boolean
): ClaudeCliSettingsConfig {
const settings = loadClaudeCliSettings(projectDir);
settings.smartContext = {
...settings.smartContext,
enabled
};
saveClaudeCliSettings(projectDir, settings);
return settings;
}
/**
* Get smart context enabled setting
* @param projectDir - Project directory path
* @returns Current smart context status or false as fallback
*/
export function getSmartContextEnabled(projectDir: string): boolean {
try {
const settings = loadClaudeCliSettings(projectDir);
return settings.smartContext?.enabled ?? false;
} catch {
return false;
}
}
/**
* Update native resume setting
* @param projectDir - Project directory path
* @param enabled - Whether native resume is enabled
* @returns Updated settings config
*/
export function setNativeResume(
projectDir: string,
enabled: boolean
): ClaudeCliSettingsConfig {
const settings = loadClaudeCliSettings(projectDir);
settings.nativeResume = enabled;
saveClaudeCliSettings(projectDir, settings);
return settings;
}
/**
* Get native resume setting
* @param projectDir - Project directory path
* @returns Current native resume status or true as fallback
*/
export function getNativeResume(projectDir: string): boolean {
try {
const settings = loadClaudeCliSettings(projectDir);
return settings.nativeResume ?? true;
} catch {
return true;
}
}
/**
* Add API endpoint as a tool with type: 'api-endpoint'
* Usage: --tool <name> or --tool custom --model <id>
@@ -879,21 +1053,8 @@ export function getContextToolsPath(provider: 'codexlens' | 'ace' | 'none'): str
}
// ========== Model Configuration Functions ==========
/**
* Get predefined models for a specific tool
*/
export function getPredefinedModels(tool: string): string[] {
const toolName = tool as CliToolName;
return PREDEFINED_MODELS[toolName] ? [...PREDEFINED_MODELS[toolName]] : [];
}
/**
* Get all predefined models
*/
export function getAllPredefinedModels(): Record<string, string[]> {
return { ...PREDEFINED_MODELS };
}
// NOTE: Model reference data has been moved to system reference (src/config/provider-models.ts)
// User configuration only manages primaryModel/secondaryModel per tool via tools.{tool}
/**
* Get tool configuration (compatible with cli-config-manager interface)
@@ -995,16 +1156,15 @@ export function isToolEnabled(projectDir: string, tool: string): boolean {
}
/**
* Get full config response for API (includes predefined models)
* Get full config response for API
* Note: Provider model reference has been moved to system reference (see provider-routes.ts)
*/
export function getFullConfigResponse(projectDir: string): {
config: ClaudeCliToolsConfig;
predefinedModels: Record<string, string[]>;
} {
const config = loadClaudeCliTools(projectDir);
return {
config,
predefinedModels: { ...PREDEFINED_MODELS }
config
};
}

View File

@@ -11,8 +11,6 @@ import {
saveClaudeCliTools,
getToolConfig as getToolConfigFromClaude,
updateToolConfig as updateToolConfigFromClaude,
getPredefinedModels as getPredefinedModelsFromClaude,
getAllPredefinedModels,
getPrimaryModel as getPrimaryModelFromClaude,
getSecondaryModel as getSecondaryModelFromClaude,
isToolEnabled as isToolEnabledFromClaude,
@@ -39,27 +37,6 @@ export interface CliConfig {
export type { CliToolName };
// ========== Re-exported Constants ==========
/**
* @deprecated Use getPredefinedModels() or getAllPredefinedModels() instead
*/
export const PREDEFINED_MODELS = getAllPredefinedModels();
/**
* @deprecated Default config is now managed in claude-cli-tools.ts
*/
export const DEFAULT_CONFIG: CliConfig = {
version: 1,
tools: {
gemini: { enabled: true, primaryModel: 'gemini-2.5-pro', secondaryModel: 'gemini-2.5-flash' },
qwen: { enabled: true, primaryModel: 'coder-model', secondaryModel: 'coder-model' },
codex: { enabled: true, primaryModel: 'gpt-5.2', secondaryModel: 'gpt-5.2' },
claude: { enabled: true, primaryModel: 'sonnet', secondaryModel: 'haiku' },
opencode: { enabled: true, primaryModel: 'opencode/glm-4.7-free', secondaryModel: 'opencode/glm-4.7-free' }
}
};
// ========== Re-exported Functions ==========
/**
@@ -162,19 +139,12 @@ export function getSecondaryModel(baseDir: string, tool: string): string {
return getSecondaryModelFromClaude(baseDir, tool);
}
/**
* Get all predefined models for a tool
*/
export function getPredefinedModels(tool: string): string[] {
return getPredefinedModelsFromClaude(tool);
}
/**
* Get full config response for API
* Note: Provider model reference has been moved to system reference (see provider-routes.ts)
*/
export function getFullConfigResponse(baseDir: string): {
config: CliConfig;
predefinedModels: Record<string, string[]>;
} {
const response = getFullConfigResponseFromClaude(baseDir);
@@ -194,7 +164,6 @@ export function getFullConfigResponse(baseDir: string): {
config: {
version: parseFloat(response.config.version) || 1,
tools
},
predefinedModels: response.predefinedModels
}
};
}