feat(queue): implement queue scheduler service and API routes

- Added QueueSchedulerService to manage task queue lifecycle, including state machine, dependency resolution, and session management.
- Implemented HTTP API endpoints for queue scheduling:
  - POST /api/queue/execute: Submit items to the scheduler.
  - GET /api/queue/scheduler/state: Retrieve full scheduler state.
  - POST /api/queue/scheduler/start: Start scheduling loop with items.
  - POST /api/queue/scheduler/pause: Pause scheduling.
  - POST /api/queue/scheduler/stop: Gracefully stop the scheduler.
  - POST /api/queue/scheduler/config: Update scheduler configuration.
- Introduced types for queue items, scheduler state, and WebSocket messages to ensure type safety and compatibility with the backend.
- Added static model lists for LiteLLM, used as a fallback when the user configuration defines no available models.
This commit is contained in:
catlog22
2026-02-27 20:53:46 +08:00
parent 5b54f38aa3
commit 75173312c1
47 changed files with 3813 additions and 307 deletions

View File

@@ -0,0 +1,93 @@
/**
* LiteLLM Static Model Lists (Fallback)
*
* Sourced from LiteLLM's internal model lists.
* Used as fallback when user config has no availableModels defined.
*
* Last updated: 2026-02-27
* Source: Python litellm module static lists
*/
/**
 * Descriptor for a single model entry in the static fallback lists.
 */
export interface ModelInfo {
  // Model identifier as passed to the CLI/LiteLLM (e.g. 'gemini-2.5-pro').
  id: string;
  // Human-readable display name for UIs.
  name: string;
}
/**
 * Mapping from CLI tool names (e.g. 'gemini', 'codex', 'claude') to static
 * LiteLLM provider model lists. Used only as a fallback when the user config
 * defines no availableModels for the tool.
 */
export const LITELLM_STATIC_MODELS: Record<string, ModelInfo[]> = {
  // Gemini models (from litellm.gemini_models)
  gemini: [
    { id: 'gemini-2.5-pro', name: 'Gemini 2.5 Pro' },
    { id: 'gemini-2.5-flash', name: 'Gemini 2.5 Flash' },
    { id: 'gemini-2.0-flash', name: 'Gemini 2.0 Flash' },
    { id: 'gemini-2.0-pro-exp-02-05', name: 'Gemini 2.0 Pro Exp' },
    { id: 'gemini-1.5-pro', name: 'Gemini 1.5 Pro' },
    { id: 'gemini-1.5-flash', name: 'Gemini 1.5 Flash' },
    { id: 'gemini-1.5-pro-latest', name: 'Gemini 1.5 Pro Latest' },
    { id: 'gemini-embedding-001', name: 'Gemini Embedding 001' }
  ],
  // OpenAI models (from litellm.open_ai_chat_completion_models)
  codex: [
    { id: 'gpt-5.2', name: 'GPT-5.2' },
    { id: 'gpt-5.1-chat-latest', name: 'GPT-5.1 Chat Latest' },
    { id: 'gpt-4o', name: 'GPT-4o' },
    { id: 'gpt-4o-mini', name: 'GPT-4o Mini' },
    { id: 'o4-mini-2025-04-16', name: 'O4 Mini' },
    { id: 'o3', name: 'O3' },
    { id: 'o1-mini', name: 'O1 Mini' },
    { id: 'gpt-4-turbo', name: 'GPT-4 Turbo' }
  ],
  // Anthropic models (from litellm.anthropic_models)
  claude: [
    { id: 'claude-sonnet-4-5-20250929', name: 'Claude Sonnet 4.5' },
    { id: 'claude-opus-4-5-20251101', name: 'Claude Opus 4.5' },
    { id: 'claude-opus-4-6', name: 'Claude Opus 4.6' },
    { id: 'claude-sonnet-4-20250514', name: 'Claude Sonnet 4' },
    { id: 'claude-opus-4-20250514', name: 'Claude Opus 4' },
    { id: 'claude-3-5-sonnet-20241022', name: 'Claude 3.5 Sonnet' },
    { id: 'claude-3-5-haiku-20241022', name: 'Claude 3.5 Haiku' },
    { id: 'claude-3-opus-20240229', name: 'Claude 3 Opus' },
    { id: 'claude-3-haiku-20240307', name: 'Claude 3 Haiku' },
    { id: 'claude-haiku-4-5', name: 'Claude Haiku 4.5' }
  ],
  // Models routed through the LiteLLM proxy for opencode ('opencode/' prefix)
  opencode: [
    { id: 'opencode/glm-4.7-free', name: 'GLM-4.7 Free' },
    { id: 'opencode/gpt-5-nano', name: 'GPT-5 Nano' },
    { id: 'opencode/grok-code', name: 'Grok Code' },
    { id: 'opencode/minimax-m2.1-free', name: 'MiniMax M2.1 Free' }
  ],
  // Qwen models; 'coder-model'/'vision-model' are generic aliases, not
  // versioned ids — presumably resolved by the Qwen CLI. TODO confirm.
  qwen: [
    { id: 'qwen2.5-coder-32b', name: 'Qwen 2.5 Coder 32B' },
    { id: 'qwen2.5-coder', name: 'Qwen 2.5 Coder' },
    { id: 'qwen2.5-72b', name: 'Qwen 2.5 72B' },
    { id: 'qwen2-72b', name: 'Qwen 2 72B' },
    { id: 'coder-model', name: 'Qwen Coder' },
    { id: 'vision-model', name: 'Qwen Vision' }
  ]
};
/**
 * Look up the static fallback model list for a CLI tool.
 *
 * @param toolId - Tool identifier (e.g., 'gemini', 'claude', 'codex')
 * @returns The tool's fallback models, or an empty array when none are defined
 */
export function getFallbackModels(toolId: string): ModelInfo[] {
  const models = LITELLM_STATIC_MODELS[toolId];
  return models !== undefined ? models : [];
}
/**
 * Check if a tool has fallback models defined.
 *
 * Uses an own-value lookup instead of the `in` operator: `in` also matches
 * properties inherited from Object.prototype (e.g. 'toString'), which made
 * this function return true for ids that getFallbackModels resolves to an
 * empty list. The lookup below keeps the two functions consistent.
 *
 * @param toolId - Tool identifier
 * @returns true if fallback models exist for the tool
 */
export function hasFallbackModels(toolId: string): boolean {
  return LITELLM_STATIC_MODELS[toolId] !== undefined;
}

View File

@@ -1,9 +1,9 @@
/**
* CLI Tool Model Reference Library
* CLI Tool Model Type Definitions
*
* System reference for available models per CLI tool provider.
* This is a read-only reference, NOT user configuration.
* User configuration is managed via tools.{tool}.primaryModel/secondaryModel in cli-tools.json
* Type definitions for CLI tool models.
* Model lists are now read from user configuration (cli-tools.json).
* Each tool can define availableModels in its configuration.
*/
export interface ProviderModelInfo {
@@ -19,105 +19,5 @@ export interface ProviderInfo {
models: ProviderModelInfo[];
}
/**
 * System reference for CLI tool models.
 * Maps provider names to their available models.
 *
 * NOTE(review): the trailing `as const` assertion was removed. Combined with
 * the explicit `Record<string, ProviderInfo>` annotation it was a defect: the
 * annotation widens the literal anyway (so `as const` preserved nothing), and
 * the readonly arrays it produces are not assignable to the mutable
 * `ProviderModelInfo[]` field, which is a type error under strict TypeScript.
 */
export const PROVIDER_MODELS: Record<string, ProviderInfo> = {
  google: {
    name: 'Google AI',
    models: [
      { id: 'gemini-2.5-pro', name: 'Gemini 2.5 Pro', capabilities: ['text', 'vision', 'code'], contextWindow: 1000000 },
      { id: 'gemini-2.5-flash', name: 'Gemini 2.5 Flash', capabilities: ['text', 'code'], contextWindow: 1000000 },
      { id: 'gemini-2.0-flash', name: 'Gemini 2.0 Flash', capabilities: ['text'], contextWindow: 1000000 },
      { id: 'gemini-1.5-pro', name: 'Gemini 1.5 Pro', capabilities: ['text', 'vision'], contextWindow: 2000000 },
      { id: 'gemini-1.5-flash', name: 'Gemini 1.5 Flash', capabilities: ['text'], contextWindow: 1000000 }
    ]
  },
  qwen: {
    name: 'Qwen',
    models: [
      { id: 'coder-model', name: 'Qwen Coder', capabilities: ['code'] },
      { id: 'vision-model', name: 'Qwen Vision', capabilities: ['vision'] },
      { id: 'qwen2.5-coder-32b', name: 'Qwen 2.5 Coder 32B', capabilities: ['code'] }
    ]
  },
  openai: {
    name: 'OpenAI',
    models: [
      { id: 'gpt-5.2', name: 'GPT-5.2', capabilities: ['text', 'code'] },
      { id: 'gpt-4.1', name: 'GPT-4.1', capabilities: ['text', 'code'] },
      { id: 'o4-mini', name: 'O4 Mini', capabilities: ['text'] },
      { id: 'o3', name: 'O3', capabilities: ['text'] }
    ]
  },
  anthropic: {
    name: 'Anthropic',
    models: [
      // Bare aliases ('sonnet', 'opus', 'haiku') coexist with versioned ids;
      // presumably the CLI resolves aliases to a current version — TODO confirm.
      { id: 'sonnet', name: 'Claude Sonnet', capabilities: ['text', 'code'] },
      { id: 'opus', name: 'Claude Opus', capabilities: ['text', 'code', 'vision'] },
      { id: 'haiku', name: 'Claude Haiku', capabilities: ['text'] },
      { id: 'claude-sonnet-4-5-20250929', name: 'Claude 4.5 Sonnet (2025-09-29)', capabilities: ['text', 'code'] },
      { id: 'claude-opus-4-5-20251101', name: 'Claude 4.5 Opus (2025-11-01)', capabilities: ['text', 'code', 'vision'] }
    ]
  },
  litellm: {
    name: 'LiteLLM Aggregator',
    // Aggregator ids are prefixed with their upstream provider ('openai/…').
    models: [
      { id: 'opencode/glm-4.7-free', name: 'GLM-4.7 Free', capabilities: ['text'] },
      { id: 'opencode/gpt-5-nano', name: 'GPT-5 Nano', capabilities: ['text'] },
      { id: 'opencode/grok-code', name: 'Grok Code', capabilities: ['code'] },
      { id: 'opencode/minimax-m2.1-free', name: 'MiniMax M2.1 Free', capabilities: ['text'] },
      { id: 'anthropic/claude-sonnet-4-20250514', name: 'Claude Sonnet 4 (via LiteLLM)', capabilities: ['text'] },
      { id: 'anthropic/claude-opus-4-20250514', name: 'Claude Opus 4 (via LiteLLM)', capabilities: ['text'] },
      { id: 'openai/gpt-4.1', name: 'GPT-4.1 (via LiteLLM)', capabilities: ['text'] },
      { id: 'openai/o3', name: 'O3 (via LiteLLM)', capabilities: ['text'] },
      { id: 'google/gemini-2.5-pro', name: 'Gemini 2.5 Pro (via LiteLLM)', capabilities: ['text'] },
      { id: 'google/gemini-2.5-flash', name: 'Gemini 2.5 Flash (via LiteLLM)', capabilities: ['text'] }
    ]
  }
};
/**
 * Get models for a specific provider.
 *
 * @param provider - Provider name (e.g., 'google', 'qwen', 'openai', 'anthropic', 'litellm')
 * @returns Array of model information (empty when the provider is unknown)
 */
export function getProviderModels(provider: string): ProviderModelInfo[] {
  const info = PROVIDER_MODELS[provider];
  return info ? info.models : [];
}
/**
 * Get all provider names.
 *
 * @returns Array of provider names
 */
export function getAllProviders(): string[] {
  return Object.entries(PROVIDER_MODELS).map(([providerId]) => providerId);
}
/**
 * Find model information across all providers.
 *
 * @param modelId - Model identifier to search for
 * @returns Model information or undefined if not found
 */
export function findModelInfo(modelId: string): ProviderModelInfo | undefined {
  // Flatten every provider's model list, then take the first id match.
  return Object.values(PROVIDER_MODELS)
    .flatMap((provider) => provider.models)
    .find((candidate) => candidate.id === modelId);
}
/**
 * Get provider name for a model ID.
 *
 * @param modelId - Model identifier
 * @returns Provider name or undefined if not found
 */
export function getProviderForModel(modelId: string): string | undefined {
  const entry = Object.entries(PROVIDER_MODELS).find(([, provider]) =>
    provider.models.some((model) => model.id === modelId)
  );
  return entry?.[0];
}
// Re-export from claude-cli-tools for convenience
export type { ClaudeCliTool, ClaudeCliToolsConfig, CliToolName } from '../tools/claude-cli-tools.js';