diff --git a/README.md b/README.md index f84ea015..5194391f 100644 --- a/README.md +++ b/README.md @@ -149,6 +149,12 @@ The CCW Dashboard (`ccw view`) provides: --- +## 🔒 Security + +The dashboard server is **localhost-bound by default** and **API endpoints require authentication**. See `ccw/docs/SECURITY.md` for the full security model, token usage, and safe deployment guidance. + +--- + ## 🛠️ Command Reference CCW provides a rich set of commands for managing workflows, tasks, and interactions with AI tools. For a complete list and detailed descriptions of all available commands, please refer to the [**COMMAND_REFERENCE.md**](COMMAND_REFERENCE.md) file. diff --git a/ccw/docs/SECURITY.md b/ccw/docs/SECURITY.md new file mode 100644 index 00000000..7131f3f0 --- /dev/null +++ b/ccw/docs/SECURITY.md @@ -0,0 +1,104 @@ +# CCW Dashboard Server Security + +This document describes the CCW dashboard server security model, authentication, and recommended deployment practices. + +## Summary + +- **Authentication**: API endpoints require a JWT token (header or cookie). +- **Default binding**: Server binds to `127.0.0.1` by default to avoid network exposure. +- **CORS**: Only localhost origins are allowed; wildcard CORS is not used. + +## Authentication Model + +### Token Types + +CCW uses **JWT (HS256)** tokens for API authentication: + +- **Header-based**: `Authorization: Bearer ` +- **Cookie-based**: `auth_token=` (set automatically for local browser access) + +### Token Generation & Storage + +On server start, CCW generates or reuses: + +- **Secret key** (random 256-bit minimum): stored at `~/.ccw/auth/secret.key` (or under `CCW_DATA_DIR`) +- **Current token**: stored at `~/.ccw/auth/token.jwt` (or under `CCW_DATA_DIR`) + +Tokens have a **24-hour expiry**. CCW rotates tokens when re-generated near expiry. + +> **Note**: On Windows, POSIX-style `0600` permissions are best-effort; CCW still writes files with restrictive modes where supported. 
+ +### Retrieving a Token + +To retrieve the current token from the local machine: + +```bash +curl -s http://127.0.0.1:3456/api/auth/token +``` + +This endpoint is **localhost-only** (loopback). It also sets a `HttpOnly` cookie for browser clients. + +### Using a Token + +Example (header-based): + +```bash +curl -H "Authorization: Bearer " http://127.0.0.1:3456/api/health +``` + +Browser clients typically use cookie auth automatically when the dashboard is opened from `http://127.0.0.1:` or `http://localhost:`. + +## Network Binding (Localhost by Default) + +By default, CCW binds to `127.0.0.1`: + +```bash +ccw serve --host 127.0.0.1 --port 3456 +``` + +To bind to all interfaces (advanced / higher risk): + +```bash +ccw serve --host 0.0.0.0 --port 3456 +``` + +Binding to non-localhost addresses exposes the dashboard API to the network. Only do this if you understand the risk and have controls in place. + +### Recommendations if Using `--host` + +- Use a host firewall to restrict inbound access to trusted IPs. +- Prefer VPN access over opening ports publicly. +- Treat the JWT token as a password; never share it. + +## CORS Policy + +CCW no longer uses `Access-Control-Allow-Origin: *`. + +- Allowed origins are restricted to: + - `http://localhost:` + - `http://127.0.0.1:` +- `Access-Control-Allow-Credentials: true` is set to support cookie auth. + +## Threat Model (What This Protects) + +Designed to mitigate: + +- Accidental exposure of dashboard APIs on a LAN/Wi‑Fi network. +- Cross-origin attacks from untrusted web pages attempting to call local APIs. + +Not designed to protect against: + +- A fully compromised local machine/user account. +- Deliberately exposing the server to the internet without additional perimeter security. + +## Troubleshooting + +### `401 Unauthorized` + +- Visit the dashboard page again (cookie is re-issued for localhost access), or +- Call `GET /api/auth/token` and use the returned token in the `Authorization` header. 
+ +### Token Expired + +- Call `GET /api/auth/token` to refresh/rotate the token. + diff --git a/ccw/src/cli.ts b/ccw/src/cli.ts index b4685466..e53c84f1 100644 --- a/ccw/src/cli.ts +++ b/ccw/src/cli.ts @@ -83,6 +83,7 @@ export function run(argv: string[]): void { .description('Open workflow dashboard server with live path switching') .option('-p, --path ', 'Path to project directory', '.') .option('--port ', 'Server port', '3456') + .option('--host ', 'Server host to bind', '127.0.0.1') .option('--no-browser', 'Start server without opening browser') .action(viewCommand); @@ -92,6 +93,7 @@ export function run(argv: string[]): void { .description('Alias for view command') .option('-p, --path ', 'Initial project directory') .option('--port ', 'Server port', '3456') + .option('--host ', 'Server host to bind', '127.0.0.1') .option('--no-browser', 'Start server without opening browser') .action(serveCommand); diff --git a/ccw/src/commands/cli.ts b/ccw/src/commands/cli.ts index a50ed0db..3236de5a 100644 --- a/ccw/src/commands/cli.ts +++ b/ccw/src/commands/cli.ts @@ -5,6 +5,7 @@ import chalk from 'chalk'; import http from 'http'; +import inquirer from 'inquirer'; import { cliExecutorTool, getCliToolsStatus, @@ -26,6 +27,7 @@ import { getStorageLocationInstructions } from '../tools/storage-manager.js'; import { getHistoryStore } from '../tools/cli-history-store.js'; +import { createSpinner } from '../utils/ui.js'; // Dashboard notification settings const DASHBOARD_PORT = process.env.CCW_PORT || 3456; @@ -280,12 +282,17 @@ async function cleanStorage(options: StorageOptions): Promise { } if (!force) { - console.log(chalk.bold.yellow('\n Warning: This will delete ALL CCW storage:')); - console.log(` Location: ${stats.rootPath}`); - console.log(` Projects: ${stats.projectCount}`); - console.log(` Size: ${formatBytes(stats.totalSize)}`); - console.log(chalk.gray('\n Use --force to confirm deletion.\n')); - return; + const { proceed } = await inquirer.prompt([{ + type: 
'confirm', + name: 'proceed', + message: `Delete ALL CCW storage? This will remove ${stats.projectCount} projects (${formatBytes(stats.totalSize)}). This action cannot be undone.`, + default: false + }]); + + if (!proceed) { + console.log(chalk.yellow('\n Storage clean cancelled.\n')); + return; + } } console.log(chalk.bold.cyan('\n Cleaning all storage...\n')); @@ -554,6 +561,11 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec } else if (optionPrompt) { // Use --prompt/-p option (preferred for multi-line) finalPrompt = optionPrompt; + const promptLineCount = optionPrompt.split(/\r?\n/).length; + if (promptLineCount > 3) { + console.log(chalk.dim(' 💡 Tip: Use --file option to avoid shell escaping issues with multi-line prompts')); + console.log(chalk.dim(' Example: ccw cli -f prompt.txt --tool gemini')); + } } else { // Fall back to positional argument finalPrompt = positionalPrompt; @@ -705,7 +717,6 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec } const nativeMode = noNative ? ' (prompt-concat)' : ''; const idInfo = id ? ` [${id}]` : ''; - console.log(chalk.cyan(`\n Executing ${tool} (${mode} mode${resumeInfo}${nativeMode})${idInfo}...\n`)); // Show merge details if (isMerge) { @@ -719,11 +730,31 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec // Generate execution ID for streaming (use custom ID or timestamp-based) const executionId = id || `${Date.now()}-${tool}`; const startTime = Date.now(); + const spinnerBaseText = `Executing ${tool} (${mode} mode${resumeInfo}${nativeMode})${idInfo}...`; + console.log(); + + const spinner = stream ? null : createSpinner(` ${spinnerBaseText}`).start(); + const elapsedInterval = spinner + ? 
setInterval(() => { + const elapsedSeconds = Math.floor((Date.now() - startTime) / 1000); + spinner.text = ` ${spinnerBaseText} (${elapsedSeconds}s elapsed)`; + }, 1000) + : null; + elapsedInterval?.unref?.(); + + if (!spinner) { + console.log(chalk.cyan(` ${spinnerBaseText}\n`)); + } // Handle process interruption (SIGINT/SIGTERM) to notify dashboard const handleInterrupt = (signal: string) => { const duration = Date.now() - startTime; - console.log(chalk.yellow(`\n Interrupted by ${signal}`)); + if (elapsedInterval) clearInterval(elapsedInterval); + if (spinner) { + spinner.warn(`Interrupted by ${signal} (${Math.floor(duration / 1000)}s elapsed)`); + } else { + console.log(chalk.yellow(`\n Interrupted by ${signal}`)); + } // Kill child process (gemini/codex/qwen CLI) if running killCurrentCliProcess(); @@ -790,6 +821,19 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec stream: !!stream // stream=true → streaming enabled (no cache), stream=false → cache output (default) }, onOutput); // Always pass onOutput for real-time dashboard streaming + if (elapsedInterval) clearInterval(elapsedInterval); + if (spinner) { + const durationSeconds = (result.execution.duration_ms / 1000).toFixed(1); + const turnInfo = result.success && result.conversation.turn_count > 1 + ? ` (turn ${result.conversation.turn_count})` + : ''; + if (result.success) { + spinner.succeed(`Completed in ${durationSeconds}s${turnInfo}`); + } else { + spinner.fail(`Failed after ${durationSeconds}s`); + } + } + // If not streaming (default), print output now // Prefer parsedOutput (from stream parser) over raw stdout for better formatting if (!stream) { @@ -802,10 +846,12 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec // Print summary with execution ID and turn info console.log(); if (result.success) { - const turnInfo = result.conversation.turn_count > 1 - ? 
` (turn ${result.conversation.turn_count})` - : ''; - console.log(chalk.green(` ✓ Completed in ${(result.execution.duration_ms / 1000).toFixed(1)}s${turnInfo}`)); + if (!spinner) { + const turnInfo = result.conversation.turn_count > 1 + ? ` (turn ${result.conversation.turn_count})` + : ''; + console.log(chalk.green(` ✓ Completed in ${(result.execution.duration_ms / 1000).toFixed(1)}s${turnInfo}`)); + } console.log(chalk.gray(` ID: ${result.execution.id}`)); if (isMerge && !id) { // Merge without custom ID: updated all source conversations @@ -844,7 +890,9 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec // Delay to allow HTTP request to complete setTimeout(() => process.exit(0), 150); } else { - console.log(chalk.red(` ✗ Failed (${result.execution.status})`)); + if (!spinner) { + console.log(chalk.red(` ✗ Failed (${result.execution.status})`)); + } console.log(chalk.gray(` ID: ${result.execution.id}`)); console.log(chalk.gray(` Duration: ${(result.execution.duration_ms / 1000).toFixed(1)}s`)); console.log(chalk.gray(` Exit Code: ${result.execution.exit_code}`)); @@ -861,6 +909,8 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec } if (stderrLines.length > 30) { console.log(chalk.yellow(` ... ${stderrLines.length - 30} more lines`)); + console.log(chalk.cyan(` 💡 View full output: ccw cli output ${result.execution.id}`)); + console.log(); } console.log(chalk.gray(' ' + '─'.repeat(60))); } @@ -870,7 +920,6 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec console.log(chalk.yellow.bold(' Troubleshooting:')); console.log(chalk.gray(` • Check if ${tool} is properly installed: ccw cli status`)); console.log(chalk.gray(` • Enable debug mode: DEBUG=true ccw cli -p "..." 
or set DEBUG=true && ccw cli -p "..."`)); - console.log(chalk.gray(` • View full output: ccw cli output ${result.execution.id}`)); if (result.stderr?.includes('API key') || result.stderr?.includes('Authentication')) { console.log(chalk.gray(` • Check API key configuration for ${tool}`)); } @@ -901,6 +950,8 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec } } catch (error) { const err = error as Error; + if (elapsedInterval) clearInterval(elapsedInterval); + if (spinner) spinner.fail('Execution error'); console.error(chalk.red.bold(`\n ✗ Execution Error\n`)); console.error(chalk.red(` ${err.message}`)); @@ -1121,8 +1172,8 @@ export async function cliCommand( console.log(chalk.bold.cyan('\n CCW CLI Tool Executor\n')); console.log(' Unified interface for Gemini, Qwen, and Codex CLI tools.\n'); console.log(' Usage:'); - console.log(chalk.gray(' ccw cli -p "" --tool Execute with prompt')); - console.log(chalk.gray(' ccw cli -f prompt.txt --tool Execute from file')); + console.log(chalk.gray(' ccw cli -f prompt.txt --tool Execute from file (recommended for multi-line)')); + console.log(chalk.gray(' ccw cli -p "" --tool Execute with prompt (single-line)')); console.log(); console.log(' Subcommands:'); console.log(chalk.gray(' status Check CLI tools availability')); @@ -1133,8 +1184,8 @@ export async function cliCommand( console.log(chalk.gray(' test-parse [args] Debug CLI argument parsing')); console.log(); console.log(' Options:'); - console.log(chalk.gray(' -p, --prompt Prompt text')); - console.log(chalk.gray(' -f, --file Read prompt from file')); + console.log(chalk.gray(' -f, --file Read prompt from file (recommended for multi-line prompts)')); + console.log(chalk.gray(' -p, --prompt Prompt text (single-line)')); console.log(chalk.gray(' --tool Tool: gemini, qwen, codex (default: gemini)')); console.log(chalk.gray(' --mode Mode: analysis, write, auto (default: analysis)')); console.log(chalk.gray(' -d, --debug Enable debug logging 
for troubleshooting')); @@ -1146,6 +1197,27 @@ export async function cliCommand( console.log(chalk.gray(' --cache Cache: comma-separated @patterns and text')); console.log(chalk.gray(' --inject-mode Inject mode: none, full, progressive')); console.log(); + console.log(' Examples:'); + console.log(chalk.gray(' ccw cli -f my-prompt.txt --tool gemini')); + console.log(); + console.log(chalk.gray(' # Bash/Linux heredoc')); + console.log(chalk.gray(" ccw cli -f <(cat <<'EOF'")); + console.log(chalk.gray(' PURPOSE: Multi-line prompt')); + console.log(chalk.gray(' TASK: Example task')); + console.log(chalk.gray(' EOF')); + console.log(chalk.gray(' ) --tool gemini')); + console.log(); + console.log(chalk.gray(' # PowerShell multi-line')); + console.log(chalk.gray(" @'")); + console.log(chalk.gray(' PURPOSE: Multi-line prompt')); + console.log(chalk.gray(' TASK: Example task')); + console.log(chalk.gray(" '@ | Out-File -Encoding utf8 prompt.tmp; ccw cli -f prompt.tmp --tool gemini")); + console.log(); + console.log(chalk.gray(' ccw cli --resume --tool gemini')); + console.log(chalk.gray(' ccw cli -p "..." --cache "@src/**/*.ts" --tool codex')); + console.log(chalk.gray(' ccw cli -p "..." 
--cache "@src/**/*" --inject-mode progressive --tool gemini')); + console.log(chalk.gray(' ccw cli output --final # View result with usage hint')); + console.log(); console.log(' Cache format:'); console.log(chalk.gray(' --cache "@src/**/*.ts,@CLAUDE.md" # @patterns to pack')); console.log(chalk.gray(' --cache "@src/**/*,extra context" # patterns + text content')); @@ -1162,14 +1234,7 @@ export async function cliCommand( console.log(chalk.gray(' --offset Start from byte offset')); console.log(chalk.gray(' --limit Limit output bytes')); console.log(); - console.log(' Examples:'); - console.log(chalk.gray(' ccw cli -p "Analyze auth module" --tool gemini')); - console.log(chalk.gray(' ccw cli -f prompt.txt --tool codex --mode write')); - console.log(chalk.gray(' ccw cli -p "$(cat template.md)" --tool gemini')); - console.log(chalk.gray(' ccw cli --resume --tool gemini')); - console.log(chalk.gray(' ccw cli -p "..." --cache "@src/**/*.ts" --tool codex')); - console.log(chalk.gray(' ccw cli -p "..." --cache "@src/**/*" --inject-mode progressive --tool gemini')); - console.log(chalk.gray(' ccw cli output --final # View result with usage hint')); + console.log(chalk.dim(' Tip: For complex prompts, use --file to avoid shell escaping issues')); console.log(); } } diff --git a/ccw/src/commands/issue.ts b/ccw/src/commands/issue.ts index 57aa4cb0..626fe8e6 100644 --- a/ccw/src/commands/issue.ts +++ b/ccw/src/commands/issue.ts @@ -6,8 +6,18 @@ import chalk from 'chalk'; import { execSync } from 'child_process'; +import inquirer from 'inquirer'; import { existsSync, mkdirSync, readFileSync, writeFileSync, unlinkSync, statSync } from 'fs'; import { join, resolve } from 'path'; +import { EXEC_TIMEOUTS } from '../utils/exec-constants.js'; + +function isExecTimeoutError(error: unknown): boolean { + const err = error as { code?: unknown; errno?: unknown; message?: unknown } | null; + const code = err?.code ?? 
err?.errno; + if (code === 'ETIMEDOUT') return true; + const message = typeof err?.message === 'string' ? err.message : ''; + return message.includes('ETIMEDOUT'); +} // Handle EPIPE errors gracefully process.stdout.on('error', (err: NodeJS.ErrnoException) => { @@ -262,13 +272,15 @@ function getProjectRoot(): string { // Get the common git directory (points to main repo's .git) const gitCommonDir = execSync('git rev-parse --git-common-dir', { encoding: 'utf-8', - stdio: ['pipe', 'pipe', 'pipe'] + stdio: ['pipe', 'pipe', 'pipe'], + timeout: EXEC_TIMEOUTS.GIT_QUICK, }).trim(); // Get the current git directory const gitDir = execSync('git rev-parse --git-dir', { encoding: 'utf-8', - stdio: ['pipe', 'pipe', 'pipe'] + stdio: ['pipe', 'pipe', 'pipe'], + timeout: EXEC_TIMEOUTS.GIT_QUICK, }).trim(); // Normalize paths for comparison (Windows case insensitive) @@ -287,7 +299,10 @@ function getProjectRoot(): string { return mainRepoRoot; } } - } catch { + } catch (err: unknown) { + if (isExecTimeoutError(err)) { + console.warn(`[issue] git rev-parse timed out after ${EXEC_TIMEOUTS.GIT_QUICK}ms; falling back to filesystem detection`); + } // Git command failed - fall through to manual detection } @@ -334,7 +349,7 @@ function ensureIssuesDir(): void { // ============ Issues JSONL ============ -function readIssues(): Issue[] { +export function readIssues(): Issue[] { const path = join(getIssuesDir(), 'issues.jsonl'); if (!existsSync(path)) return []; try { @@ -347,7 +362,7 @@ function readIssues(): Issue[] { } } -function writeIssues(issues: Issue[]): void { +export function writeIssues(issues: Issue[]): void { ensureIssuesDir(); const path = join(getIssuesDir(), 'issues.jsonl'); // Always add trailing newline for proper JSONL format @@ -482,7 +497,7 @@ function getSolutionsPath(issueId: string): string { return join(getIssuesDir(), 'solutions', `${issueId}.jsonl`); } -function readSolutions(issueId: string): Solution[] { +export function readSolutions(issueId: string): 
Solution[] { const path = getSolutionsPath(issueId); if (!existsSync(path)) return []; try { @@ -495,7 +510,7 @@ function readSolutions(issueId: string): Solution[] { } } -function writeSolutions(issueId: string, solutions: Solution[]): void { +export function writeSolutions(issueId: string, solutions: Solution[]): void { const dir = join(getIssuesDir(), 'solutions'); if (!existsSync(dir)) mkdirSync(dir, { recursive: true }); // Always add trailing newline for proper JSONL format @@ -596,7 +611,7 @@ function generateQueueFileId(): string { return `QUE-${ts}`; } -function readQueue(queueId?: string): Queue | null { +export function readQueue(queueId?: string): Queue | null { const index = readQueueIndex(); const targetId = queueId || index.active_queue_id; @@ -748,7 +763,7 @@ function parseFailureReason(reason: string): FailureDetail { }; } -function writeQueue(queue: Queue): void { +export function writeQueue(queue: Queue): void { ensureQueuesDir(); // Support both old (tasks) and new (solutions) queue format @@ -1841,6 +1856,20 @@ async function queueAction(subAction: string | undefined, issueId: string | unde process.exit(1); } + if (!options.force) { + const { proceed } = await inquirer.prompt([{ + type: 'confirm', + name: 'proceed', + message: `Delete queue ${queueId}? 
This action cannot be undone.`, + default: false + }]); + + if (!proceed) { + console.log(chalk.yellow('Queue deletion cancelled')); + return; + } + } + // Remove from index const index = readQueueIndex(); index.queues = index.queues.filter(q => q.id !== queueId); diff --git a/ccw/src/commands/serve.ts b/ccw/src/commands/serve.ts index 8184e0d1..1047bc89 100644 --- a/ccw/src/commands/serve.ts +++ b/ccw/src/commands/serve.ts @@ -7,6 +7,7 @@ import type { Server } from 'http'; interface ServeOptions { port?: number; path?: string; + host?: string; browser?: boolean; } @@ -16,6 +17,7 @@ interface ServeOptions { */ export async function serveCommand(options: ServeOptions): Promise { const port = options.port || 3456; + const host = options.host || '127.0.0.1'; // Validate project path let initialPath = process.cwd(); @@ -30,26 +32,34 @@ export async function serveCommand(options: ServeOptions): Promise { console.log(chalk.blue.bold('\n CCW Dashboard Server\n')); console.log(chalk.gray(` Initial project: ${initialPath}`)); + console.log(chalk.gray(` Host: ${host}`)); console.log(chalk.gray(` Port: ${port}\n`)); try { // Start server console.log(chalk.cyan(' Starting server...')); - const server = await startServer({ port, initialPath }); + const server = await startServer({ port, host, initialPath }); - const url = `http://localhost:${port}`; - console.log(chalk.green(` Server running at ${url}`)); + const boundUrl = `http://${host}:${port}`; + const browserUrl = host === '0.0.0.0' || host === '::' ? 
`http://localhost:${port}` : boundUrl; + + if (!['127.0.0.1', 'localhost', '::1'].includes(host)) { + console.log(chalk.yellow(`\n WARNING: Binding to ${host} exposes the server to network attacks.`)); + console.log(chalk.yellow(' Ensure firewall is configured and never expose tokens publicly.\n')); + } + + console.log(chalk.green(` Server running at ${boundUrl}`)); // Open browser if (options.browser !== false) { console.log(chalk.cyan(' Opening in browser...')); try { - await launchBrowser(url); + await launchBrowser(browserUrl); console.log(chalk.green.bold('\n Dashboard opened in browser!')); } catch (err) { const error = err as Error; console.log(chalk.yellow(`\n Could not open browser: ${error.message}`)); - console.log(chalk.gray(` Open manually: ${url}`)); + console.log(chalk.gray(` Open manually: ${browserUrl}`)); } } diff --git a/ccw/src/commands/stop.ts b/ccw/src/commands/stop.ts index 6d406b97..45576ef0 100644 --- a/ccw/src/commands/stop.ts +++ b/ccw/src/commands/stop.ts @@ -59,20 +59,47 @@ export async function stopCommand(options: StopOptions): Promise { signal: AbortSignal.timeout(2000) }).catch(() => null); - if (healthCheck && healthCheck.ok) { - // CCW server is running - send shutdown signal + if (healthCheck) { + // CCW server is running (may require authentication) - send shutdown signal console.log(chalk.cyan(' CCW server found, sending shutdown signal...')); - await fetch(`http://localhost:${port}/api/shutdown`, { + let token: string | undefined; + try { + const tokenResponse = await fetch(`http://localhost:${port}/api/auth/token`, { + signal: AbortSignal.timeout(2000) + }); + const tokenData = await tokenResponse.json() as { token?: string }; + token = tokenData.token; + } catch { + // Ignore token acquisition errors; shutdown request will fail with 401. + } + + const shutdownResponse = await fetch(`http://localhost:${port}/api/shutdown`, { method: 'POST', + headers: token ? 
{ Authorization: `Bearer ${token}` } : undefined, signal: AbortSignal.timeout(5000) }).catch(() => null); // Wait a moment for shutdown await new Promise(resolve => setTimeout(resolve, 500)); - console.log(chalk.green.bold('\n Server stopped successfully!\n')); - process.exit(0); + if (shutdownResponse && 'ok' in shutdownResponse && shutdownResponse.ok) { + console.log(chalk.green.bold('\n Server stopped successfully!\n')); + process.exit(0); + } + + // Best-effort verify shutdown (may still succeed even if shutdown endpoint didn't return ok) + const postCheck = await fetch(`http://localhost:${port}/api/health`, { + signal: AbortSignal.timeout(2000) + }).catch(() => null); + + if (!postCheck) { + console.log(chalk.green.bold('\n Server stopped successfully!\n')); + process.exit(0); + } + + const statusHint = shutdownResponse ? `HTTP ${shutdownResponse.status}` : 'no response'; + console.log(chalk.yellow(` Shutdown request did not stop server (${statusHint}).`)); } // No CCW server responding, check if port is in use diff --git a/ccw/src/commands/view.ts b/ccw/src/commands/view.ts index 33d4fad9..0df7e2e1 100644 --- a/ccw/src/commands/view.ts +++ b/ccw/src/commands/view.ts @@ -6,6 +6,7 @@ import chalk from 'chalk'; interface ViewOptions { port?: number; path?: string; + host?: string; browser?: boolean; } @@ -30,7 +31,8 @@ async function isServerRunning(port: number): Promise { }); clearTimeout(timeoutId); - return response.ok; + // Authenticated APIs may return 401; any HTTP response means server is running. 
+ return response.status > 0; } catch { return false; } @@ -44,8 +46,13 @@ async function isServerRunning(port: number): Promise { */ async function switchWorkspace(port: number, path: string): Promise { try { + const tokenResponse = await fetch(`http://localhost:${port}/api/auth/token`); + const tokenData = await tokenResponse.json() as { token?: string }; + const token = tokenData.token; + const response = await fetch( - `http://localhost:${port}/api/switch-path?path=${encodeURIComponent(path)}` + `http://localhost:${port}/api/switch-path?path=${encodeURIComponent(path)}`, + token ? { headers: { Authorization: `Bearer ${token}` } } : undefined ); return await response.json() as SwitchWorkspaceResult; } catch (err) { @@ -62,6 +69,8 @@ async function switchWorkspace(port: number, path: string): Promise { const port = options.port || 3456; + const host = options.host || '127.0.0.1'; + const browserHost = host === '0.0.0.0' || host === '::' ? 'localhost' : host; // Resolve workspace path let workspacePath = process.cwd(); @@ -89,7 +98,7 @@ export async function viewCommand(options: ViewOptions): Promise { console.log(chalk.green(` Workspace switched successfully`)); // Open browser with the new path - const url = `http://localhost:${port}/?path=${encodeURIComponent(result.path!)}`; + const url = `http://${browserHost}:${port}/?path=${encodeURIComponent(result.path!)}`; if (options.browser !== false) { console.log(chalk.cyan(' Opening in browser...')); @@ -113,6 +122,7 @@ export async function viewCommand(options: ViewOptions): Promise { await serveCommand({ path: workspacePath, port: port, + host, browser: options.browser }); } diff --git a/ccw/src/config/litellm-api-config-manager.ts b/ccw/src/config/litellm-api-config-manager.ts index 3447ddb9..a5b12f20 100644 --- a/ccw/src/config/litellm-api-config-manager.ts +++ b/ccw/src/config/litellm-api-config-manager.ts @@ -3,7 +3,7 @@ * Manages provider credentials, custom endpoints, and cache settings */ -import { 
existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; +import { existsSync, mkdirSync, readFileSync, writeFileSync, chmodSync } from 'fs'; import { homedir } from 'os'; import { join } from 'path'; import { StoragePaths, GlobalPaths, ensureStorageDir } from './storage-paths.js'; @@ -44,6 +44,14 @@ function getConfigPath(_baseDir?: string): string { return join(configDir, 'litellm-api-config.json'); } +function bestEffortRestrictPermissions(filePath: string, mode: number): void { + try { + chmodSync(filePath, mode); + } catch { + // Ignore permission errors (e.g., Windows or restrictive environments) + } +} + /** * Load configuration from file */ @@ -68,7 +76,8 @@ export function loadLiteLLMApiConfig(baseDir: string): LiteLLMApiConfig { */ function saveConfig(baseDir: string, config: LiteLLMApiConfig): void { const configPath = getConfigPath(baseDir); - writeFileSync(configPath, JSON.stringify(config, null, 2), 'utf-8'); + writeFileSync(configPath, JSON.stringify(config, null, 2), { encoding: 'utf8', mode: 0o600 }); + bestEffortRestrictPermissions(configPath, 0o600); } /** @@ -117,10 +126,26 @@ export function getProviderWithResolvedEnvVars( const provider = getProvider(baseDir, providerId); if (!provider) return null; - return { + const resolvedApiKey = resolveEnvVar(provider.apiKey); + + // Avoid leaking env-var syntax or secrets if this object is logged/serialized. 
+ const sanitizedProvider: ProviderCredential = { ...provider, - resolvedApiKey: resolveEnvVar(provider.apiKey), + apiKey: '***', + apiKeys: provider.apiKeys?.map(keyEntry => ({ + ...keyEntry, + key: '***', + })), }; + + Object.defineProperty(sanitizedProvider, 'resolvedApiKey', { + value: resolvedApiKey, + enumerable: false, + writable: false, + configurable: false, + }); + + return sanitizedProvider as ProviderCredential & { resolvedApiKey: string }; } /** diff --git a/ccw/src/core/auth/csrf-manager.ts b/ccw/src/core/auth/csrf-manager.ts new file mode 100644 index 00000000..0703b583 --- /dev/null +++ b/ccw/src/core/auth/csrf-manager.ts @@ -0,0 +1,104 @@ +import { randomBytes } from 'crypto'; + +export interface CsrfTokenManagerOptions { + tokenTtlMs?: number; + cleanupIntervalMs?: number; +} + +type CsrfTokenRecord = { + sessionId: string; + expiresAtMs: number; + used: boolean; +}; + +const DEFAULT_TOKEN_TTL_MS = 15 * 60 * 1000; // 15 minutes +const DEFAULT_CLEANUP_INTERVAL_MS = 5 * 60 * 1000; // 5 minutes + +export class CsrfTokenManager { + private readonly tokenTtlMs: number; + private readonly records = new Map(); + private readonly cleanupTimer: NodeJS.Timeout | null; + + constructor(options: CsrfTokenManagerOptions = {}) { + this.tokenTtlMs = options.tokenTtlMs ?? DEFAULT_TOKEN_TTL_MS; + + const cleanupIntervalMs = options.cleanupIntervalMs ?? 
DEFAULT_CLEANUP_INTERVAL_MS; + if (cleanupIntervalMs > 0) { + this.cleanupTimer = setInterval(() => { + this.cleanupExpiredTokens(); + }, cleanupIntervalMs); + + if (this.cleanupTimer.unref) { + this.cleanupTimer.unref(); + } + } else { + this.cleanupTimer = null; + } + } + + dispose(): void { + if (this.cleanupTimer) { + clearInterval(this.cleanupTimer); + } + this.records.clear(); + } + + generateToken(sessionId: string): string { + const token = randomBytes(32).toString('hex'); + this.records.set(token, { + sessionId, + expiresAtMs: Date.now() + this.tokenTtlMs, + used: false, + }); + return token; + } + + validateToken(token: string, sessionId: string): boolean { + const record = this.records.get(token); + if (!record) return false; + if (record.used) return false; + if (record.sessionId !== sessionId) return false; + + if (Date.now() > record.expiresAtMs) { + this.records.delete(token); + return false; + } + + record.used = true; + return true; + } + + cleanupExpiredTokens(nowMs: number = Date.now()): number { + let removed = 0; + + for (const [token, record] of this.records.entries()) { + if (record.used || nowMs > record.expiresAtMs) { + this.records.delete(token); + removed += 1; + } + } + + return removed; + } + + getActiveTokenCount(): number { + return this.records.size; + } +} + +let csrfManagerInstance: CsrfTokenManager | null = null; + +export function getCsrfTokenManager(options?: CsrfTokenManagerOptions): CsrfTokenManager { + if (!csrfManagerInstance) { + csrfManagerInstance = new CsrfTokenManager(options); + } + return csrfManagerInstance; +} + +export function resetCsrfTokenManager(): void { + if (csrfManagerInstance) { + csrfManagerInstance.dispose(); + } + csrfManagerInstance = null; +} + diff --git a/ccw/src/core/auth/csrf-middleware.ts b/ccw/src/core/auth/csrf-middleware.ts new file mode 100644 index 00000000..d0c85d32 --- /dev/null +++ b/ccw/src/core/auth/csrf-middleware.ts @@ -0,0 +1,158 @@ +import type http from 'http'; +import type { 
IncomingMessage, ServerResponse } from 'http'; +import { randomBytes } from 'crypto'; +import { getCsrfTokenManager } from './csrf-manager.js'; + +export interface CsrfMiddlewareContext { + pathname: string; + req: IncomingMessage; + res: ServerResponse; +} + +function getHeaderValue(header: string | string[] | undefined): string | null { + if (!header) return null; + if (Array.isArray(header)) return header[0] ?? null; + return header; +} + +function parseCookieHeader(cookieHeader: string | null | undefined): Record { + if (!cookieHeader) return {}; + + const cookies: Record = {}; + for (const part of cookieHeader.split(';')) { + const [rawName, ...rawValueParts] = part.trim().split('='); + if (!rawName) continue; + const rawValue = rawValueParts.join('='); + try { + cookies[rawName] = decodeURIComponent(rawValue); + } catch { + cookies[rawName] = rawValue; + } + } + return cookies; +} + +function appendSetCookie(res: ServerResponse, cookie: string): void { + const existing = res.getHeader('Set-Cookie'); + if (!existing) { + res.setHeader('Set-Cookie', cookie); + return; + } + + if (Array.isArray(existing)) { + res.setHeader('Set-Cookie', [...existing, cookie]); + return; + } + + res.setHeader('Set-Cookie', [String(existing), cookie]); +} + +function setCsrfCookie(res: ServerResponse, token: string, maxAgeSeconds: number): void { + const attributes = [ + `XSRF-TOKEN=${encodeURIComponent(token)}`, + 'Path=/', + 'HttpOnly', + 'SameSite=Strict', + `Max-Age=${maxAgeSeconds}`, + ]; + appendSetCookie(res, attributes.join('; ')); +} + +function envFlagEnabled(name: string): boolean { + const value = process.env[name]; + if (!value) return false; + return ['1', 'true', 'yes', 'on'].includes(value.trim().toLowerCase()); +} + +async function readRawBody(req: IncomingMessage): Promise { + const withCache = req as http.IncomingMessage & { __ccwRawBody?: string }; + if (typeof withCache.__ccwRawBody === 'string') return withCache.__ccwRawBody; + + return new Promise((resolve, 
reject) => { + let body = ''; + req.on('data', (chunk) => { + body += chunk.toString(); + }); + req.on('end', () => { + withCache.__ccwRawBody = body; + resolve(body); + }); + req.on('error', reject); + }); +} + +async function readJsonBody(req: IncomingMessage): Promise { + const withCache = req as http.IncomingMessage & { body?: unknown }; + if (withCache.body !== undefined) return withCache.body; + + const raw = await readRawBody(req); + if (!raw) return undefined; + + try { + const parsed = JSON.parse(raw) as unknown; + withCache.body = parsed; + return parsed; + } catch { + return undefined; + } +} + +function extractCsrfTokenFromBody(body: unknown): string | null { + if (!body || typeof body !== 'object') return null; + const record = body as Record; + const token = record.csrfToken; + return typeof token === 'string' && token ? token : null; +} + +function writeJson(res: ServerResponse, status: number, body: Record): void { + res.writeHead(status, { 'Content-Type': 'application/json; charset=utf-8' }); + res.end(JSON.stringify(body)); +} + +export async function csrfValidation(ctx: CsrfMiddlewareContext): Promise { + const { pathname, req, res } = ctx; + + if (!pathname.startsWith('/api/')) return true; + if (envFlagEnabled('CCW_DISABLE_CSRF')) return true; + + const method = (req.method || 'GET').toUpperCase(); + if (!['POST', 'PUT', 'PATCH', 'DELETE'].includes(method)) return true; + + // Always allow token acquisition routes. + if (pathname === '/api/auth/token') return true; + + // Requests authenticated via Authorization header do not require CSRF protection. 
+ const authorization = getHeaderValue(req.headers.authorization); + if (authorization && /^Bearer\s+.+$/i.test(authorization)) return true; + + const headerToken = getHeaderValue(req.headers['x-csrf-token']); + const cookies = parseCookieHeader(getHeaderValue(req.headers.cookie)); + // SECURITY: never accept the XSRF-TOKEN cookie value as the submitted token — cookies are sent automatically on cross-site requests, so that would defeat CSRF protection entirely. + + let bodyToken: string | null = null; + if (!headerToken) { + const body = await readJsonBody(req); + bodyToken = extractCsrfTokenFromBody(body); + } + + const token = headerToken || bodyToken || null; + const sessionId = cookies.ccw_session_id; + + if (!token || !sessionId) { + writeJson(res, 403, { error: 'CSRF validation failed' }); + return false; + } + + const tokenManager = getCsrfTokenManager(); + const ok = tokenManager.validateToken(token, sessionId); + if (!ok) { + writeJson(res, 403, { error: 'CSRF validation failed' }); + return false; + } + + const nextToken = tokenManager.generateToken(sessionId); + res.setHeader('X-CSRF-Token', nextToken); + setCsrfCookie(res, nextToken, 15 * 60); + + return true; +} diff --git a/ccw/src/core/auth/middleware.ts b/ccw/src/core/auth/middleware.ts new file mode 100644 index 00000000..c50095ba --- /dev/null +++ b/ccw/src/core/auth/middleware.ts @@ -0,0 +1,94 @@ +import type http from 'http'; +import type { IncomingMessage, ServerResponse } from 'http'; +import type { TokenManager } from './token-manager.js'; + +export interface AuthMiddlewareContext { + pathname: string; + req: IncomingMessage; + res: ServerResponse; + tokenManager: TokenManager; + secretKey: string; + unauthenticatedPaths?: Set; +} + +function parseCookieHeader(cookieHeader: string | null | undefined): Record { + if (!cookieHeader) return {}; + + const cookies: Record = {}; + for (const part of cookieHeader.split(';')) { + const [rawName, ...rawValueParts] = part.trim().split('='); + if (!rawName) continue; + const rawValue = rawValueParts.join('='); + try { + cookies[rawName] =
decodeURIComponent(rawValue); + } catch { + cookies[rawName] = rawValue; + } + } + return cookies; +} + +function getHeaderValue(header: string | string[] | undefined): string | null { + if (!header) return null; + if (Array.isArray(header)) return header[0] ?? null; + return header; +} + +export function extractAuthToken(req: IncomingMessage): string | null { + const authorization = getHeaderValue(req.headers.authorization); + if (authorization) { + const match = authorization.match(/^Bearer\s+(.+)$/i); + if (match?.[1]) return match[1].trim(); + } + + const cookies = parseCookieHeader(getHeaderValue(req.headers.cookie)); + if (cookies.auth_token) return cookies.auth_token; + + return null; +} + +export function isLocalhostRequest(req: IncomingMessage): boolean { + const remote = req.socket?.remoteAddress ?? ''; + return remote === '127.0.0.1' || remote === '::1' || remote === '::ffff:127.0.0.1'; +} + +export function setAuthCookie(res: ServerResponse, token: string, expiresAt: Date): void { + const maxAgeSeconds = Math.max(0, Math.floor((expiresAt.getTime() - Date.now()) / 1000)); + + const attributes = [ + `auth_token=${encodeURIComponent(token)}`, + 'Path=/', + 'HttpOnly', + 'SameSite=Strict', + `Max-Age=${maxAgeSeconds}`, + ]; + + res.setHeader('Set-Cookie', attributes.join('; ')); +} + +function writeJson(res: ServerResponse, status: number, body: Record): void { + res.writeHead(status, { 'Content-Type': 'application/json; charset=utf-8' }); + res.end(JSON.stringify(body)); +} + +export function authMiddleware(ctx: AuthMiddlewareContext): boolean { + const { pathname, req, res, tokenManager, secretKey, unauthenticatedPaths } = ctx; + + if (!pathname.startsWith('/api/')) return true; + if (unauthenticatedPaths?.has(pathname)) return true; + + const token = extractAuthToken(req); + if (!token) { + writeJson(res, 401, { error: 'Unauthorized' }); + return false; + } + + const ok = tokenManager.validateToken(token, secretKey); + if (!ok) { + writeJson(res, 401, { 
error: 'Unauthorized' }); + return false; + } + + (req as http.IncomingMessage & { authenticated?: boolean }).authenticated = true; + return true; +} diff --git a/ccw/src/core/auth/token-manager.ts b/ccw/src/core/auth/token-manager.ts new file mode 100644 index 00000000..8272f4c5 --- /dev/null +++ b/ccw/src/core/auth/token-manager.ts @@ -0,0 +1,219 @@ +import { randomBytes } from 'crypto'; +import { existsSync, mkdirSync, readFileSync, writeFileSync, chmodSync } from 'fs'; +import { dirname, join } from 'path'; +import jwt from 'jsonwebtoken'; +import type { Algorithm } from 'jsonwebtoken'; +import { getCCWHome } from '../../config/storage-paths.js'; + +export interface TokenResult { + token: string; + expiresAt: Date; +} + +export interface TokenInfo extends TokenResult { + issuedAt: Date; + revokedAt?: Date; + rotatedAt?: Date; + replacedBy?: string; +} + +export interface TokenManagerOptions { + authDir?: string; + secretKeyPath?: string; + tokenPath?: string; + tokenTtlMs?: number; + rotateBeforeExpiryMs?: number; +} + +const DEFAULT_TOKEN_TTL_MS = 24 * 60 * 60 * 1000; +const DEFAULT_ROTATE_BEFORE_EXPIRY_MS = 60 * 60 * 1000; +const JWT_ALGORITHM: Algorithm = 'HS256'; + +function ensureDirectory(dirPath: string): void { + if (!existsSync(dirPath)) { + mkdirSync(dirPath, { recursive: true }); + } +} + +function bestEffortRestrictPermissions(filePath: string, mode: number): void { + try { + chmodSync(filePath, mode); + } catch { + // Ignore permission errors (e.g., Windows or restrictive environments) + } +} + +function writeSecretFile(filePath: string, content: string): void { + ensureDirectory(dirname(filePath)); + writeFileSync(filePath, content, { encoding: 'utf8', mode: 0o600 }); + bestEffortRestrictPermissions(filePath, 0o600); +} + +function writeTokenFile(filePath: string, content: string): void { + ensureDirectory(dirname(filePath)); + writeFileSync(filePath, content, { encoding: 'utf8', mode: 0o600 }); + bestEffortRestrictPermissions(filePath, 0o600); +} 
+ +function parseJwtExpiry(token: string): Date | null { + const decoded = jwt.decode(token); + if (!decoded || typeof decoded !== 'object') return null; + if (typeof decoded.exp !== 'number') return null; + return new Date(decoded.exp * 1000); +} + +export class TokenManager { + private readonly authDir: string; + private readonly secretKeyPath: string; + private readonly tokenPath: string; + private readonly tokenTtlMs: number; + private readonly rotateBeforeExpiryMs: number; + + private secretKey: string | null = null; + private readonly activeTokens = new Map(); + + constructor(options: TokenManagerOptions = {}) { + this.authDir = options.authDir ?? join(getCCWHome(), 'auth'); + this.secretKeyPath = options.secretKeyPath ?? join(this.authDir, 'secret.key'); + this.tokenPath = options.tokenPath ?? join(this.authDir, 'token.jwt'); + this.tokenTtlMs = options.tokenTtlMs ?? DEFAULT_TOKEN_TTL_MS; + this.rotateBeforeExpiryMs = options.rotateBeforeExpiryMs ?? DEFAULT_ROTATE_BEFORE_EXPIRY_MS; + } + + getSecretKey(): string { + if (this.secretKey) return this.secretKey; + + ensureDirectory(this.authDir); + if (existsSync(this.secretKeyPath)) { + const loaded = readFileSync(this.secretKeyPath, 'utf8').trim(); + if (!loaded) { + throw new Error('Auth secret key file is empty'); + } + this.secretKey = loaded; + return loaded; + } + + const generated = randomBytes(32).toString('hex'); + writeSecretFile(this.secretKeyPath, generated); + this.secretKey = generated; + return generated; + } + + generateToken(secretKey: string): TokenResult { + const token = jwt.sign( + { + typ: 'ccw-api', + jti: randomBytes(16).toString('hex'), + }, + secretKey, + { + algorithm: JWT_ALGORITHM, + expiresIn: Math.floor(this.tokenTtlMs / 1000), + } + ); + + const expiresAt = parseJwtExpiry(token) ?? 
new Date(Date.now() + this.tokenTtlMs); + this.activeTokens.set(token, { token, expiresAt, issuedAt: new Date() }); + return { token, expiresAt }; + } + + validateToken(token: string, secretKey: string): boolean { + const info = this.activeTokens.get(token); + if (info?.revokedAt) return false; + + try { + jwt.verify(token, secretKey, { algorithms: [JWT_ALGORITHM] }); + return true; + } catch { + return false; + } + } + + refreshToken(token: string, secretKey: string): TokenResult { + const existing = this.activeTokens.get(token); + if (existing) { + existing.revokedAt = new Date(); + } + + const next = this.generateToken(secretKey); + if (existing) { + existing.rotatedAt = new Date(); + existing.replacedBy = next.token; + } + return next; + } + + /** + * Read an existing persisted token or create a new one. + * If the existing token is nearing expiry, rotate it. + */ + getOrCreateAuthToken(): TokenResult { + const secretKey = this.getSecretKey(); + + if (existsSync(this.tokenPath)) { + const persisted = readFileSync(this.tokenPath, 'utf8').trim(); + if (persisted && this.validateToken(persisted, secretKey)) { + const expiresAt = parseJwtExpiry(persisted); + if (expiresAt) { + // Ensure persisted token is tracked for revocation support + if (!this.activeTokens.has(persisted)) { + this.activeTokens.set(persisted, { token: persisted, expiresAt, issuedAt: new Date() }); + } + + const msUntilExpiry = expiresAt.getTime() - Date.now(); + if (msUntilExpiry > this.rotateBeforeExpiryMs) { + return { token: persisted, expiresAt }; + } + } + + // Token exists but is expiring soon (or expiry missing) → rotate + const rotated = this.generateToken(secretKey); + writeTokenFile(this.tokenPath, rotated.token); + + const existing = this.activeTokens.get(persisted); + if (existing) { + existing.rotatedAt = new Date(); + existing.replacedBy = rotated.token; + } + + return rotated; + } + } + + const created = this.generateToken(secretKey); + writeTokenFile(this.tokenPath, 
created.token); + return created; + } + + revokeToken(token: string): void { + const info = this.activeTokens.get(token); + if (info) { + info.revokedAt = new Date(); + } else { + this.activeTokens.set(token, { + token, + issuedAt: new Date(), + expiresAt: new Date(0), + revokedAt: new Date(), + }); + } + } +} + +let tokenManagerInstance: TokenManager | null = null; + +export function getTokenManager(options?: TokenManagerOptions): TokenManager { + if (!tokenManagerInstance) { + tokenManagerInstance = new TokenManager(options); + } + return tokenManagerInstance; +} + +export function resetTokenManager(): void { + tokenManagerInstance = null; +} + +export function getOrCreateAuthToken(): TokenResult { + return getTokenManager().getOrCreateAuthToken(); +} + diff --git a/ccw/src/core/cache-manager.ts b/ccw/src/core/cache-manager.ts index 8c7a9907..ee55d1ea 100644 --- a/ccw/src/core/cache-manager.ts +++ b/ccw/src/core/cache-manager.ts @@ -1,6 +1,6 @@ -import { existsSync, mkdirSync, readFileSync, writeFileSync, statSync, unlinkSync, readdirSync } from 'fs'; -import { join, dirname } from 'path'; -import { StoragePaths, ensureStorageDir } from '../config/storage-paths.js'; +import { readFile, readdir, stat, unlink, writeFile, mkdir } from 'fs/promises'; +import { join } from 'path'; +import { StoragePaths } from '../config/storage-paths.js'; interface CacheEntry { data: T; @@ -42,13 +42,17 @@ export class CacheManager { * @param watchPaths - Array of file/directory paths to check for modifications * @returns Cached data or null if invalid/expired */ - get(watchPaths: string[] = []): T | null { - if (!existsSync(this.cacheFile)) { + async get(watchPaths: string[] = []): Promise { + let content: string; + try { + content = await readFile(this.cacheFile, 'utf8'); + } catch (err: any) { + if (err?.code === 'ENOENT') return null; + console.warn(`Cache read error for ${this.cacheFile}:`, err?.message || String(err)); return null; } try { - const content = 
readFileSync(this.cacheFile, 'utf8'); const entry: CacheEntry = JSON.parse(content, (key, value) => { // Revive Map objects from JSON if (key === 'fileHashes' && value && typeof value === 'object') { @@ -67,16 +71,16 @@ export class CacheManager { // Check if any watched files have changed if (watchPaths.length > 0) { - const currentHashes = this.computeFileHashes(watchPaths); + const currentHashes = await this.computeFileHashes(watchPaths); if (!this.hashesMatch(entry.fileHashes, currentHashes)) { return null; } } return entry.data; - } catch (err) { + } catch (err: any) { // If cache file is corrupted or unreadable, treat as invalid - console.warn(`Cache read error for ${this.cacheFile}:`, (err as Error).message); + console.warn(`Cache parse error for ${this.cacheFile}:`, err?.message || String(err)); return null; } } @@ -86,17 +90,15 @@ export class CacheManager { * @param data - Data to cache * @param watchPaths - Array of file/directory paths to track */ - set(data: T, watchPaths: string[] = []): void { + async set(data: T, watchPaths: string[] = []): Promise { try { // Ensure cache directory exists - if (!existsSync(this.cacheDir)) { - mkdirSync(this.cacheDir, { recursive: true }); - } + await mkdir(this.cacheDir, { recursive: true }); const entry: CacheEntry = { data, timestamp: Date.now(), - fileHashes: this.computeFileHashes(watchPaths), + fileHashes: await this.computeFileHashes(watchPaths), ttl: this.ttl }; @@ -106,7 +108,7 @@ export class CacheManager { fileHashes: Object.fromEntries(entry.fileHashes) }; - writeFileSync(this.cacheFile, JSON.stringify(serializable, null, 2), 'utf8'); + await writeFile(this.cacheFile, JSON.stringify(serializable, null, 2), 'utf8'); } catch (err) { console.warn(`Cache write error for ${this.cacheFile}:`, (err as Error).message); } @@ -115,12 +117,11 @@ export class CacheManager { /** * Invalidate (delete) the cache */ - invalidate(): void { + async invalidate(): Promise { try { - if (existsSync(this.cacheFile)) { - 
unlinkSync(this.cacheFile); - } + await unlink(this.cacheFile); } catch (err) { + if ((err as any)?.code === 'ENOENT') return; console.warn(`Cache invalidation error for ${this.cacheFile}:`, (err as Error).message); } } @@ -130,8 +131,8 @@ export class CacheManager { * @param watchPaths - Array of file/directory paths to check * @returns True if cache exists and is valid */ - isValid(watchPaths: string[] = []): boolean { - return this.get(watchPaths) !== null; + async isValid(watchPaths: string[] = []): Promise { + return (await this.get(watchPaths)) !== null; } /** @@ -139,32 +140,29 @@ export class CacheManager { * @param watchPaths - Array of file/directory paths * @returns Map of path to mtime */ - private computeFileHashes(watchPaths: string[]): Map { + private async computeFileHashes(watchPaths: string[]): Promise> { const hashes = new Map(); - for (const path of watchPaths) { + await Promise.all(watchPaths.map(async (watchPath) => { try { - if (!existsSync(path)) { - continue; - } - - const stats = statSync(path); + const stats = await stat(watchPath); if (stats.isDirectory()) { // For directories, use directory mtime (detects file additions/deletions) - hashes.set(path, stats.mtimeMs); + hashes.set(watchPath, stats.mtimeMs); // Also recursively scan for workflow session files - this.scanDirectory(path, hashes); + await this.scanDirectory(watchPath, hashes); } else { // For files, use file mtime - hashes.set(path, stats.mtimeMs); + hashes.set(watchPath, stats.mtimeMs); } - } catch (err) { + } catch (err: any) { + if (err?.code === 'ENOENT') return; // Skip paths that can't be accessed - console.warn(`Cannot access path ${path}:`, (err as Error).message); + console.warn(`Cannot access path ${watchPath}:`, err?.message || String(err)); } - } + })); return hashes; } @@ -175,26 +173,34 @@ export class CacheManager { * @param hashes - Map to store file hashes * @param depth - Current recursion depth (max 3) */ - private scanDirectory(dirPath: string, hashes: Map, 
depth: number = 0): void { + private async scanDirectory(dirPath: string, hashes: Map, depth: number = 0): Promise { if (depth > 3) return; // Limit recursion depth try { - const entries = readdirSync(dirPath, { withFileTypes: true }); + const entries = await readdir(dirPath, { withFileTypes: true }); - for (const entry of entries) { + await Promise.all(entries.map(async (entry) => { const fullPath = join(dirPath, entry.name); if (entry.isDirectory()) { // Track important directories if (entry.name === '.task' || entry.name === '.review' || entry.name === '.summaries') { - const stats = statSync(fullPath); - hashes.set(fullPath, stats.mtimeMs); - this.scanDirectory(fullPath, hashes, depth + 1); + try { + const stats = await stat(fullPath); + hashes.set(fullPath, stats.mtimeMs); + await this.scanDirectory(fullPath, hashes, depth + 1); + } catch { + // ignore + } } else if (entry.name.startsWith('WFS-')) { // Scan WFS session directories - const stats = statSync(fullPath); - hashes.set(fullPath, stats.mtimeMs); - this.scanDirectory(fullPath, hashes, depth + 1); + try { + const stats = await stat(fullPath); + hashes.set(fullPath, stats.mtimeMs); + await this.scanDirectory(fullPath, hashes, depth + 1); + } catch { + // ignore + } } } else if (entry.isFile()) { // Track important files @@ -204,11 +210,15 @@ export class CacheManager { entry.name === 'TODO_LIST.md' || entry.name === 'workflow-session.json' ) { - const stats = statSync(fullPath); - hashes.set(fullPath, stats.mtimeMs); + try { + const stats = await stat(fullPath); + hashes.set(fullPath, stats.mtimeMs); + } catch { + // ignore + } } } - } + })); } catch (err) { // Skip directories that can't be read console.warn(`Cannot scan directory ${dirPath}:`, (err as Error).message); @@ -245,21 +255,24 @@ export class CacheManager { * Get cache statistics * @returns Cache info object */ - getStats(): { exists: boolean; age?: number; fileCount?: number; size?: number } { - if (!existsSync(this.cacheFile)) { + async 
getStats(): Promise<{ exists: boolean; age?: number; fileCount?: number; size?: number }> { + let fileStats; + try { + fileStats = await stat(this.cacheFile); + } catch (err: any) { + if (err?.code === 'ENOENT') return { exists: false }; return { exists: false }; } try { - const stats = statSync(this.cacheFile); - const content = readFileSync(this.cacheFile, 'utf8'); + const content = await readFile(this.cacheFile, 'utf8'); const entry = JSON.parse(content); return { exists: true, age: Date.now() - entry.timestamp, fileCount: Object.keys(entry.fileHashes || {}).length, - size: stats.size + size: fileStats.size }; } catch { return { exists: false }; @@ -287,6 +300,5 @@ export function createDashboardCache(workflowDir: string, ttl?: number): CacheMa // Use centralized storage path const projectPath = extractProjectPath(workflowDir); const cacheDir = StoragePaths.project(projectPath).cache; - ensureStorageDir(cacheDir); return new CacheManager('dashboard-data', { cacheDir, ttl }); } diff --git a/ccw/src/core/claude-freshness.ts b/ccw/src/core/claude-freshness.ts index 945c7893..db8caa3b 100644 --- a/ccw/src/core/claude-freshness.ts +++ b/ccw/src/core/claude-freshness.ts @@ -7,6 +7,15 @@ import { execSync } from 'child_process'; import { existsSync, statSync, readdirSync } from 'fs'; import { dirname, extname, relative, join } from 'path'; import { getCoreMemoryStore, ClaudeUpdateRecord } from './core-memory-store.js'; +import { EXEC_TIMEOUTS } from '../utils/exec-constants.js'; + +function isExecTimeoutError(error: unknown): boolean { + const err = error as { code?: unknown; errno?: unknown; message?: unknown } | null; + const code = err?.code ?? err?.errno; + if (code === 'ETIMEDOUT') return true; + const message = typeof err?.message === 'string' ? 
err.message : ''; + return message.includes('ETIMEDOUT'); +} // Source file extensions to track (from detect-changed-modules.ts) const SOURCE_EXTENSIONS = [ @@ -53,9 +62,12 @@ export interface FreshnessResponse { */ function isGitRepo(basePath: string): boolean { try { - execSync('git rev-parse --git-dir', { cwd: basePath, stdio: 'pipe' }); + execSync('git rev-parse --git-dir', { cwd: basePath, stdio: 'pipe', timeout: EXEC_TIMEOUTS.GIT_QUICK }); return true; - } catch (e) { + } catch (e: unknown) { + if (isExecTimeoutError(e)) { + console.warn(`[Claude Freshness] git rev-parse timed out after ${EXEC_TIMEOUTS.GIT_QUICK}ms`); + } return false; } } @@ -68,10 +80,14 @@ export function getCurrentGitCommit(basePath: string): string | null { const output = execSync('git rev-parse HEAD', { cwd: basePath, encoding: 'utf8', - stdio: ['pipe', 'pipe', 'pipe'] + stdio: ['pipe', 'pipe', 'pipe'], + timeout: EXEC_TIMEOUTS.GIT_QUICK, }).trim(); return output || null; - } catch (e) { + } catch (e: unknown) { + if (isExecTimeoutError(e)) { + console.warn(`[Claude Freshness] git rev-parse HEAD timed out after ${EXEC_TIMEOUTS.GIT_QUICK}ms`); + } return null; } } @@ -91,7 +107,8 @@ function getChangedFilesSince(basePath: string, modulePath: string, sinceDate: s { cwd: basePath, encoding: 'utf8', - stdio: ['pipe', 'pipe', 'pipe'] + stdio: ['pipe', 'pipe', 'pipe'], + timeout: EXEC_TIMEOUTS.GIT_LOG, } ).trim(); @@ -103,7 +120,10 @@ function getChangedFilesSince(basePath: string, modulePath: string, sinceDate: s const ext = extname(f).toLowerCase(); return SOURCE_EXTENSIONS.includes(ext); }); - } catch (e) { + } catch (e: unknown) { + if (isExecTimeoutError(e)) { + console.warn(`[Claude Freshness] git log timed out after ${EXEC_TIMEOUTS.GIT_LOG}ms, falling back to mtime scan`); + } // Fallback to mtime-based detection return findFilesModifiedSince(modulePath, sinceDate); } diff --git a/ccw/src/core/cors.ts b/ccw/src/core/cors.ts new file mode 100644 index 00000000..f8272694 --- /dev/null 
+++ b/ccw/src/core/cors.ts @@ -0,0 +1,10 @@ +export function validateCorsOrigin(origin: string, port: number): boolean { + return origin === `http://localhost:${port}` || origin === `http://127.0.0.1:${port}`; +} + +export function getCorsOrigin(origin: string | undefined, port: number): string { + const fallback = `http://localhost:${port}`; + if (!origin) return fallback; + return validateCorsOrigin(origin, port) ? origin : fallback; +} + diff --git a/ccw/src/core/dashboard-generator-patch.ts b/ccw/src/core/dashboard-generator-patch.ts index 1158ca03..118bad6a 100644 --- a/ccw/src/core/dashboard-generator-patch.ts +++ b/ccw/src/core/dashboard-generator-patch.ts @@ -1,4 +1,3 @@ -// @ts-nocheck // Add after line 13 (after REVIEW_TEMPLATE constant) // Modular dashboard JS files (in dependency order) diff --git a/ccw/src/core/dashboard-generator.ts b/ccw/src/core/dashboard-generator.ts index efeb0559..103add03 100644 --- a/ccw/src/core/dashboard-generator.ts +++ b/ccw/src/core/dashboard-generator.ts @@ -1,8 +1,59 @@ -// @ts-nocheck import { readFileSync, existsSync } from 'fs'; import { join, dirname } from 'path'; import { fileURLToPath } from 'url'; +interface ReviewDimensionInfo { + count: number; + [key: string]: unknown; +} + +interface ReviewData { + totalFindings: number; + severityDistribution: Record; + dimensionSummary: Record; + [key: string]: unknown; +} + +interface SessionTaskData { + status?: string; + title?: string; + task_id?: string; + [key: string]: unknown; +} + +interface SessionData { + session_id?: string; + project?: string; + created_at?: string; + tasks: SessionTaskData[]; + taskCount: number; + [key: string]: unknown; +} + +interface DashboardStatistics { + totalSessions: number; + activeSessions: number; + totalTasks: number; + completedTasks: number; + [key: string]: unknown; +} + +interface DashboardData { + generatedAt?: string; + activeSessions: SessionData[]; + archivedSessions: SessionData[]; + statistics: DashboardStatistics; + 
reviewData?: ReviewData; + liteTasks?: { + litePlan?: unknown[]; + liteFix?: unknown[]; + [key: string]: unknown; + }; + projectPath?: string; + recentPaths?: string[]; + [key: string]: unknown; +} + const __filename = fileURLToPath(import.meta.url); const __dirname = dirname(__filename); @@ -111,18 +162,19 @@ const MODULE_FILES = [ * @returns {Promise} - Generated HTML */ export async function generateDashboard(data: unknown): Promise { + const dashboardData = (data ?? {}) as DashboardData; // Use new unified template (with sidebar layout) if (existsSync(UNIFIED_TEMPLATE)) { - return generateFromUnifiedTemplate(data); + return generateFromUnifiedTemplate(dashboardData); } // Fallback to legacy workflow template if (existsSync(WORKFLOW_TEMPLATE)) { - return generateFromBundledTemplate(data, WORKFLOW_TEMPLATE); + return generateFromBundledTemplate(dashboardData, WORKFLOW_TEMPLATE); } // Fallback to inline dashboard if templates missing - return generateInlineDashboard(data); + return generateInlineDashboard(dashboardData); } /** @@ -130,7 +182,7 @@ export async function generateDashboard(data: unknown): Promise { * @param {Object} data - Dashboard data * @returns {string} - Generated HTML */ -function generateFromUnifiedTemplate(data: unknown): string { +function generateFromUnifiedTemplate(data: DashboardData): string { let html = readFileSync(UNIFIED_TEMPLATE, 'utf8'); // Read and concatenate modular CSS files in load order @@ -198,7 +250,7 @@ function generateFromUnifiedTemplate(data: unknown): string { * @param {string} templatePath - Path to workflow-dashboard.html * @returns {string} - Generated HTML */ -function generateFromBundledTemplate(data: unknown, templatePath: string): string { +function generateFromBundledTemplate(data: DashboardData, templatePath: string): string { let html = readFileSync(templatePath, 'utf8'); // Prepare workflow data for injection @@ -224,7 +276,7 @@ function generateFromBundledTemplate(data: unknown, templatePath: string): strin 
* @param {Object} reviewData - Review data to display * @returns {string} - Modified HTML with review tab */ -function injectReviewTab(html, reviewData) { +function injectReviewTab(html: string, reviewData: ReviewData): string { // Add review tab button in header controls const tabButtonHtml = ` @@ -266,10 +318,10 @@ function injectReviewTab(html, reviewData) { * @param {Object} reviewData - Review data * @returns {string} - HTML for review section */ -function generateReviewSection(reviewData) { +function generateReviewSection(reviewData: ReviewData): string { const severityBars = Object.entries(reviewData.severityDistribution) .map(([severity, count]) => { - const colors = { + const colors: Record = { critical: '#c53030', high: '#f56565', medium: '#ed8936', @@ -404,7 +456,7 @@ function generateReviewSection(reviewData) { * @param {Object} reviewData - Review data * @returns {string} - JavaScript code */ -function generateReviewScript(reviewData) { +function generateReviewScript(reviewData: ReviewData): string { return ` // Review tab functionality const reviewTabBtn = document.getElementById('reviewTabBtn'); @@ -444,7 +496,7 @@ function generateReviewScript(reviewData) { * @param {Object} data - Dashboard data * @returns {string} */ -function generateInlineDashboard(data: unknown): string { +function generateInlineDashboard(data: DashboardData): string { const stats = data.statistics; const hasReviews = data.reviewData && data.reviewData.totalFindings > 0; @@ -623,7 +675,7 @@ function generateInlineDashboard(data: unknown): string { - ${hasReviews ? renderReviewTab(data.reviewData) : ''} + ${hasReviews ? 
renderReviewTab(data.reviewData as ReviewData) : ''} @@ -666,7 +718,7 @@ function generateInlineDashboard(data: unknown): string { * @param {boolean} isActive - Whether session is active * @returns {string} - HTML string */ -function renderSessionCard(session, isActive) { +function renderSessionCard(session: SessionData, isActive: boolean): string { const completedTasks = isActive ? session.tasks.filter(t => t.status === 'completed').length : session.taskCount; @@ -704,7 +756,7 @@ function renderSessionCard(session, isActive) { * @param {Object} reviewData - Review data * @returns {string} - HTML string */ -function renderReviewTab(reviewData) { +function renderReviewTab(reviewData: ReviewData): string { const { severityDistribution, dimensionSummary } = reviewData; return ` @@ -741,4 +793,4 @@ function renderReviewTab(reviewData) { `; -} \ No newline at end of file +} diff --git a/ccw/src/core/data-aggregator.ts b/ccw/src/core/data-aggregator.ts index 961c2aa5..9e1dbad9 100644 --- a/ccw/src/core/data-aggregator.ts +++ b/ccw/src/core/data-aggregator.ts @@ -197,7 +197,7 @@ export async function aggregateData(sessions: ScanSessionsResult, workflowDir: s ]; // Check cache first - const cachedData = cache.get(watchPaths); + const cachedData = await cache.get(watchPaths); if (cachedData !== null) { console.log('Using cached dashboard data'); return cachedData; @@ -269,7 +269,7 @@ export async function aggregateData(sessions: ScanSessionsResult, workflowDir: s } // Store in cache before returning - cache.set(data, watchPaths); + await cache.set(data, watchPaths); return data; } diff --git a/ccw/src/core/lite-scanner.ts b/ccw/src/core/lite-scanner.ts index 6ffdf7c8..8b6fef9f 100644 --- a/ccw/src/core/lite-scanner.ts +++ b/ccw/src/core/lite-scanner.ts @@ -1,4 +1,4 @@ -import { existsSync, readdirSync, readFileSync, statSync } from 'fs'; +import { readFile, readdir, stat } from 'fs/promises'; import { join } from 'path'; interface TaskMeta { @@ -85,10 +85,12 @@ export async 
function scanLiteTasks(workflowDir: string): Promise { const litePlanDir = join(workflowDir, '.lite-plan'); const liteFixDir = join(workflowDir, '.lite-fix'); - return { - litePlan: scanLiteDir(litePlanDir, 'lite-plan'), - liteFix: scanLiteDir(liteFixDir, 'lite-fix') - }; + const [litePlan, liteFix] = await Promise.all([ + scanLiteDir(litePlanDir, 'lite-plan'), + scanLiteDir(liteFixDir, 'lite-fix'), + ]); + + return { litePlan, liteFix }; } /** @@ -97,39 +99,45 @@ export async function scanLiteTasks(workflowDir: string): Promise { * @param type - Task type ('lite-plan' or 'lite-fix') * @returns Array of lite task sessions */ -function scanLiteDir(dir: string, type: string): LiteSession[] { - if (!existsSync(dir)) return []; - +async function scanLiteDir(dir: string, type: string): Promise { try { - const sessions = readdirSync(dir, { withFileTypes: true }) - .filter(d => d.isDirectory()) - .map(d => { - const sessionPath = join(dir, d.name); - const session: LiteSession = { - id: d.name, - type, - path: sessionPath, - createdAt: getCreatedTime(sessionPath), - plan: loadPlanJson(sessionPath), - tasks: loadTaskJsons(sessionPath), - progress: { total: 0, completed: 0, percentage: 0 } - }; + const entries = await readdir(dir, { withFileTypes: true }); - // For lite-fix sessions, also load diagnoses separately - if (type === 'lite-fix') { - session.diagnoses = loadDiagnoses(sessionPath); - } + const sessions = (await Promise.all( + entries + .filter((entry) => entry.isDirectory()) + .map(async (entry) => { + const sessionPath = join(dir, entry.name); - // Calculate progress - session.progress = calculateProgress(session.tasks); + const [createdAt, plan, tasks, diagnoses] = await Promise.all([ + getCreatedTime(sessionPath), + loadPlanJson(sessionPath), + loadTaskJsons(sessionPath), + type === 'lite-fix' ? 
loadDiagnoses(sessionPath) : Promise.resolve(undefined), + ]); - return session; - }) + const session: LiteSession = { + id: entry.name, + type, + path: sessionPath, + createdAt, + plan, + tasks, + diagnoses, + progress: { total: 0, completed: 0, percentage: 0 }, + }; + + session.progress = calculateProgress(session.tasks); + return session; + }), + )) + .filter((session): session is LiteSession => session !== null) .sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime()); return sessions; - } catch (err) { - console.error(`Error scanning ${dir}:`, (err as Error).message); + } catch (err: any) { + if (err?.code === 'ENOENT') return []; + console.error(`Error scanning ${dir}:`, err?.message || String(err)); return []; } } @@ -139,32 +147,26 @@ function scanLiteDir(dir: string, type: string): LiteSession[] { * @param sessionPath - Session directory path * @returns Plan data or null */ -function loadPlanJson(sessionPath: string): unknown | null { +async function loadPlanJson(sessionPath: string): Promise { // Try fix-plan.json first (for lite-fix), then plan.json (for lite-plan) const fixPlanPath = join(sessionPath, 'fix-plan.json'); const planPath = join(sessionPath, 'plan.json'); // Try fix-plan.json first - if (existsSync(fixPlanPath)) { - try { - const content = readFileSync(fixPlanPath, 'utf8'); - return JSON.parse(content); - } catch { - // Continue to try plan.json - } + try { + const content = await readFile(fixPlanPath, 'utf8'); + return JSON.parse(content); + } catch { + // Continue to try plan.json } // Fallback to plan.json - if (existsSync(planPath)) { - try { - const content = readFileSync(planPath, 'utf8'); - return JSON.parse(content); - } catch { - return null; - } + try { + const content = await readFile(planPath, 'utf8'); + return JSON.parse(content); + } catch { + return null; } - - return null; } /** @@ -176,54 +178,54 @@ function loadPlanJson(sessionPath: string): unknown | null { * @param sessionPath - Session directory 
path * @returns Array of task objects */ -function loadTaskJsons(sessionPath: string): NormalizedTask[] { +async function loadTaskJsons(sessionPath: string): Promise { let tasks: NormalizedTask[] = []; // Method 1: Check .task/IMPL-*.json files const taskDir = join(sessionPath, '.task'); - if (existsSync(taskDir)) { - try { - const implTasks = readdirSync(taskDir) - .filter(f => f.endsWith('.json') && ( - f.startsWith('IMPL-') || - f.startsWith('TASK-') || - f.startsWith('task-') || - f.startsWith('diagnosis-') || - /^T\d+\.json$/i.test(f) - )) - .map(f => { - const taskPath = join(taskDir, f); - try { - const content = readFileSync(taskPath, 'utf8'); - return normalizeTask(JSON.parse(content)); - } catch { - return null; - } - }) - .filter((t): t is NormalizedTask => t !== null); - tasks = tasks.concat(implTasks); - } catch { - // Continue to other methods - } + try { + const implFiles = (await readdir(taskDir)) + .filter((fileName) => fileName.endsWith('.json') && ( + fileName.startsWith('IMPL-') || + fileName.startsWith('TASK-') || + fileName.startsWith('task-') || + fileName.startsWith('diagnosis-') || + /^T\d+\.json$/i.test(fileName) + )); + + const implTasks = (await Promise.all( + implFiles.map(async (fileName) => { + const taskPath = join(taskDir, fileName); + try { + const content = await readFile(taskPath, 'utf8'); + return normalizeTask(JSON.parse(content)); + } catch { + return null; + } + }), + )) + .filter((task): task is NormalizedTask => task !== null); + + tasks = tasks.concat(implTasks); + } catch { + // Continue to other methods } // Method 2: Check plan.json or fix-plan.json for embedded tasks array if (tasks.length === 0) { - // Try fix-plan.json first (for lite-fix), then plan.json (for lite-plan) - const fixPlanPath = join(sessionPath, 'fix-plan.json'); - const planPath = join(sessionPath, 'plan.json'); + const planFiles = [join(sessionPath, 'fix-plan.json'), join(sessionPath, 'plan.json')]; - const planFile = existsSync(fixPlanPath) ? 
fixPlanPath : - existsSync(planPath) ? planPath : null; - - if (planFile) { + for (const planFile of planFiles) { try { - const plan = JSON.parse(readFileSync(planFile, 'utf8')) as { tasks?: unknown[] }; + const plan = JSON.parse(await readFile(planFile, 'utf8')) as { tasks?: unknown[] }; if (Array.isArray(plan.tasks)) { - tasks = plan.tasks.map(t => normalizeTask(t)).filter((t): t is NormalizedTask => t !== null); + tasks = plan.tasks + .map((task) => normalizeTask(task)) + .filter((task): task is NormalizedTask => task !== null); + break; } } catch { - // Continue to other methods + // Continue to other plan files } } } @@ -231,23 +233,27 @@ function loadTaskJsons(sessionPath: string): NormalizedTask[] { // Method 3: Check for task-*.json and diagnosis-*.json files in session root if (tasks.length === 0) { try { - const rootTasks = readdirSync(sessionPath) - .filter(f => f.endsWith('.json') && ( - f.startsWith('task-') || - f.startsWith('TASK-') || - f.startsWith('diagnosis-') || - /^T\d+\.json$/i.test(f) - )) - .map(f => { - const taskPath = join(sessionPath, f); + const rootFiles = (await readdir(sessionPath)) + .filter((fileName) => fileName.endsWith('.json') && ( + fileName.startsWith('task-') || + fileName.startsWith('TASK-') || + fileName.startsWith('diagnosis-') || + /^T\d+\.json$/i.test(fileName) + )); + + const rootTasks = (await Promise.all( + rootFiles.map(async (fileName) => { + const taskPath = join(sessionPath, fileName); try { - const content = readFileSync(taskPath, 'utf8'); + const content = await readFile(taskPath, 'utf8'); return normalizeTask(JSON.parse(content)); } catch { return null; } - }) - .filter((t): t is NormalizedTask => t !== null); + }), + )) + .filter((task): task is NormalizedTask => task !== null); + tasks = tasks.concat(rootTasks); } catch { // No tasks found @@ -333,10 +339,10 @@ function normalizeTask(task: unknown): NormalizedTask | null { * @param dirPath - Directory path * @returns ISO date string */ -function 
getCreatedTime(dirPath: string): string { +async function getCreatedTime(dirPath: string): Promise { try { - const stat = statSync(dirPath); - return stat.birthtime.toISOString(); + const stats = await stat(dirPath); + return stats.birthtime.toISOString(); } catch { return new Date().toISOString(); } @@ -366,28 +372,37 @@ function calculateProgress(tasks: NormalizedTask[]): Progress { * @param sessionId - Session ID * @returns Detailed task info */ -export function getLiteTaskDetail(workflowDir: string, type: string, sessionId: string): LiteTaskDetail | null { +export async function getLiteTaskDetail(workflowDir: string, type: string, sessionId: string): Promise { const dir = type === 'lite-plan' ? join(workflowDir, '.lite-plan', sessionId) : join(workflowDir, '.lite-fix', sessionId); - if (!existsSync(dir)) return null; + try { + const stats = await stat(dir); + if (!stats.isDirectory()) return null; + } catch { + return null; + } + + const [plan, tasks, explorations, clarifications, diagnoses] = await Promise.all([ + loadPlanJson(dir), + loadTaskJsons(dir), + loadExplorations(dir), + loadClarifications(dir), + type === 'lite-fix' ? 
loadDiagnoses(dir) : Promise.resolve(undefined), + ]); const detail: LiteTaskDetail = { id: sessionId, type, path: dir, - plan: loadPlanJson(dir), - tasks: loadTaskJsons(dir), - explorations: loadExplorations(dir), - clarifications: loadClarifications(dir) + plan, + tasks, + explorations, + clarifications, + diagnoses, }; - // For lite-fix sessions, also load diagnoses - if (type === 'lite-fix') { - detail.diagnoses = loadDiagnoses(dir); - } - return detail; } @@ -396,12 +411,11 @@ export function getLiteTaskDetail(workflowDir: string, type: string, sessionId: * @param sessionPath - Session directory path * @returns Exploration results */ -function loadExplorations(sessionPath: string): unknown[] { +async function loadExplorations(sessionPath: string): Promise { const explorePath = join(sessionPath, 'explorations.json'); - if (!existsSync(explorePath)) return []; try { - const content = readFileSync(explorePath, 'utf8'); + const content = await readFile(explorePath, 'utf8'); return JSON.parse(content); } catch { return []; @@ -413,12 +427,11 @@ function loadExplorations(sessionPath: string): unknown[] { * @param sessionPath - Session directory path * @returns Clarification data */ -function loadClarifications(sessionPath: string): unknown | null { +async function loadClarifications(sessionPath: string): Promise { const clarifyPath = join(sessionPath, 'clarifications.json'); - if (!existsSync(clarifyPath)) return null; try { - const content = readFileSync(clarifyPath, 'utf8'); + const content = await readFile(clarifyPath, 'utf8'); return JSON.parse(content); } catch { return null; @@ -431,7 +444,7 @@ function loadClarifications(sessionPath: string): unknown | null { * @param sessionPath - Session directory path * @returns Diagnoses data with manifest and items */ -function loadDiagnoses(sessionPath: string): Diagnoses { +async function loadDiagnoses(sessionPath: string): Promise { const result: Diagnoses = { manifest: null, items: [] @@ -439,32 +452,35 @@ function 
loadDiagnoses(sessionPath: string): Diagnoses { // Try to load diagnoses-manifest.json first const manifestPath = join(sessionPath, 'diagnoses-manifest.json'); - if (existsSync(manifestPath)) { - try { - result.manifest = JSON.parse(readFileSync(manifestPath, 'utf8')); - } catch { - // Continue without manifest - } + try { + result.manifest = JSON.parse(await readFile(manifestPath, 'utf8')); + } catch { + // Continue without manifest } // Load all diagnosis-*.json files from session root try { - const diagnosisFiles = readdirSync(sessionPath) - .filter(f => f.startsWith('diagnosis-') && f.endsWith('.json')); + const diagnosisFiles = (await readdir(sessionPath)) + .filter((fileName) => fileName.startsWith('diagnosis-') && fileName.endsWith('.json')); - for (const file of diagnosisFiles) { - const filePath = join(sessionPath, file); - try { - const content = JSON.parse(readFileSync(filePath, 'utf8')) as Record; - result.items.push({ - id: file.replace('diagnosis-', '').replace('.json', ''), - filename: file, - ...content - }); - } catch { - // Skip invalid files - } - } + const items = (await Promise.all( + diagnosisFiles.map(async (fileName) => { + const filePath = join(sessionPath, fileName); + try { + const content = JSON.parse(await readFile(filePath, 'utf8')) as Record; + return { + id: fileName.replace('diagnosis-', '').replace('.json', ''), + filename: fileName, + ...content, + } satisfies DiagnosisItem; + } catch { + return null; + } + }), + )) + .filter((item): item is DiagnosisItem => item !== null); + + result.items.push(...items); } catch { // Return empty items if directory read fails } diff --git a/ccw/src/core/routes/auth-routes.ts b/ccw/src/core/routes/auth-routes.ts new file mode 100644 index 00000000..274bec4b --- /dev/null +++ b/ccw/src/core/routes/auth-routes.ts @@ -0,0 +1,98 @@ +import type { IncomingMessage, ServerResponse } from 'http'; +import { randomBytes } from 'crypto'; +import { getCsrfTokenManager } from '../auth/csrf-manager.js'; + 
+export interface RouteContext { + pathname: string; + url: URL; + req: IncomingMessage; + res: ServerResponse; + initialPath: string; + handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; + broadcastToClients: (data: unknown) => void; +} + +function getHeaderValue(header: string | string[] | undefined): string | null { + if (!header) return null; + if (Array.isArray(header)) return header[0] ?? null; + return header; +} + +function parseCookieHeader(cookieHeader: string | null | undefined): Record { + if (!cookieHeader) return {}; + + const cookies: Record = {}; + for (const part of cookieHeader.split(';')) { + const [rawName, ...rawValueParts] = part.trim().split('='); + if (!rawName) continue; + const rawValue = rawValueParts.join('='); + try { + cookies[rawName] = decodeURIComponent(rawValue); + } catch { + cookies[rawName] = rawValue; + } + } + return cookies; +} + +function appendSetCookie(res: ServerResponse, cookie: string): void { + const existing = res.getHeader('Set-Cookie'); + if (!existing) { + res.setHeader('Set-Cookie', cookie); + return; + } + + if (Array.isArray(existing)) { + res.setHeader('Set-Cookie', [...existing, cookie]); + return; + } + + res.setHeader('Set-Cookie', [String(existing), cookie]); +} + +function getOrCreateSessionId(req: IncomingMessage, res: ServerResponse): string { + const cookies = parseCookieHeader(getHeaderValue(req.headers.cookie)); + const existing = cookies.ccw_session_id; + if (existing) return existing; + + const created = randomBytes(16).toString('hex'); + const attributes = [ + `ccw_session_id=${encodeURIComponent(created)}`, + 'Path=/', + 'HttpOnly', + 'SameSite=Strict', + `Max-Age=${24 * 60 * 60}`, + ]; + appendSetCookie(res, attributes.join('; ')); + return created; +} + +function setCsrfCookie(res: ServerResponse, token: string, maxAgeSeconds: number): void { + const attributes = [ + `XSRF-TOKEN=${encodeURIComponent(token)}`, + 'Path=/', + 'HttpOnly', + 
'SameSite=Strict', + `Max-Age=${maxAgeSeconds}`, + ]; + appendSetCookie(res, attributes.join('; ')); +} + +export async function handleAuthRoutes(ctx: RouteContext): Promise { + const { pathname, req, res } = ctx; + + if (pathname === '/api/csrf-token' && req.method === 'GET') { + const sessionId = getOrCreateSessionId(req, res); + const tokenManager = getCsrfTokenManager(); + const csrfToken = tokenManager.generateToken(sessionId); + + res.setHeader('X-CSRF-Token', csrfToken); + setCsrfCookie(res, csrfToken, 15 * 60); + res.writeHead(200, { 'Content-Type': 'application/json; charset=utf-8' }); + res.end(JSON.stringify({ csrfToken })); + return true; + } + + return false; +} + diff --git a/ccw/src/core/routes/ccw-routes.ts b/ccw/src/core/routes/ccw-routes.ts index 425681ae..e3dc2f21 100644 --- a/ccw/src/core/routes/ccw-routes.ts +++ b/ccw/src/core/routes/ccw-routes.ts @@ -1,21 +1,10 @@ -// @ts-nocheck /** * CCW Routes Module * Handles all CCW-related API endpoints */ -import type { IncomingMessage, ServerResponse } from 'http'; import { getAllManifests } from '../manifest.js'; import { listTools } from '../../tools/index.js'; - -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; -} +import type { RouteContext } from './types.js'; /** * Handle CCW routes @@ -43,13 +32,14 @@ export async function handleCcwRoutes(ctx: RouteContext): Promise { // API: CCW Upgrade if (pathname === '/api/ccw/upgrade' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { path: installPath } = body; + const { path: installPath } = body as { path?: unknown }; + const resolvedInstallPath = typeof installPath === 'string' && installPath.trim().length > 0 ? 
installPath : undefined; try { const { spawn } = await import('child_process'); // Run ccw upgrade command - const args = installPath ? ['upgrade', '--all'] : ['upgrade', '--all']; + const args = resolvedInstallPath ? ['upgrade', '--all'] : ['upgrade', '--all']; const upgradeProcess = spawn('ccw', args, { shell: true, stdio: ['ignore', 'pipe', 'pipe'] @@ -58,16 +48,16 @@ export async function handleCcwRoutes(ctx: RouteContext): Promise { let stdout = ''; let stderr = ''; - upgradeProcess.stdout.on('data', (data) => { + upgradeProcess.stdout?.on('data', (data: Buffer) => { stdout += data.toString(); }); - upgradeProcess.stderr.on('data', (data) => { + upgradeProcess.stderr?.on('data', (data: Buffer) => { stderr += data.toString(); }); return new Promise((resolve) => { - upgradeProcess.on('close', (code) => { + upgradeProcess.on('close', (code: number | null) => { if (code === 0) { resolve({ success: true, message: 'Upgrade completed', output: stdout }); } else { @@ -75,7 +65,7 @@ export async function handleCcwRoutes(ctx: RouteContext): Promise { } }); - upgradeProcess.on('error', (err) => { + upgradeProcess.on('error', (err: Error) => { resolve({ success: false, error: err.message, status: 500 }); }); @@ -85,8 +75,8 @@ export async function handleCcwRoutes(ctx: RouteContext): Promise { resolve({ success: false, error: 'Upgrade timed out', status: 504 }); }, 120000); }); - } catch (err) { - return { success: false, error: err.message, status: 500 }; + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? 
err.message : String(err), status: 500 }; } }); return true; diff --git a/ccw/src/core/routes/claude-routes.ts b/ccw/src/core/routes/claude-routes.ts index 8a3baf77..8198dc28 100644 --- a/ccw/src/core/routes/claude-routes.ts +++ b/ccw/src/core/routes/claude-routes.ts @@ -1,22 +1,11 @@ -// @ts-nocheck /** * CLAUDE.md Routes Module * Handles all CLAUDE.md memory rules management endpoints */ -import type { IncomingMessage, ServerResponse } from 'http'; import { readFileSync, writeFileSync, existsSync, readdirSync, statSync, unlinkSync, mkdirSync } from 'fs'; -import { join, relative } from 'path'; +import { dirname, join, relative } from 'path'; import { homedir } from 'os'; - -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; -} +import type { RouteContext } from './types.js'; interface ClaudeFile { id: string; @@ -498,7 +487,7 @@ function createNewClaudeFile(level: 'user' | 'project' | 'module', template: str } // Ensure directory exists - const dir = filePath.substring(0, filePath.lastIndexOf('/') || filePath.lastIndexOf('\\')); + const dir = dirname(filePath); if (!existsSync(dir)) { mkdirSync(dir, { recursive: true }); } @@ -616,7 +605,7 @@ export async function handleClaudeRoutes(ctx: RouteContext): Promise { if (!result.success) { return { error: 'CLI execution failed', - details: result.execution?.error || 'No output received', + details: result.stderr || result.execution?.output?.stderr || 'No output received', status: 500 }; } diff --git a/ccw/src/core/routes/cli-routes.ts b/ccw/src/core/routes/cli-routes.ts index d5d9f7ae..01f1506f 100644 --- a/ccw/src/core/routes/cli-routes.ts +++ b/ccw/src/core/routes/cli-routes.ts @@ -1,9 +1,7 @@ -// @ts-nocheck /** * CLI Routes Module * Handles all CLI-related API endpoints */ 
-import type { IncomingMessage, ServerResponse } from 'http'; import { getCliToolsStatus, getCliToolsFullStatus, @@ -44,16 +42,7 @@ import { updateCodeIndexMcp, getCodeIndexMcp } from '../../tools/claude-cli-tools.js'; - -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; -} +import type { RouteContext } from './types.js'; // ========== Active Executions State ========== // Stores running CLI executions for state recovery when view is opened/refreshed diff --git a/ccw/src/core/routes/codexlens-routes.ts b/ccw/src/core/routes/codexlens-routes.ts index c0b18d50..fc066f54 100644 --- a/ccw/src/core/routes/codexlens-routes.ts +++ b/ccw/src/core/routes/codexlens-routes.ts @@ -1,2898 +1,23 @@ /** * CodexLens Routes Module - * Handles all CodexLens-related API endpoints - * - * TODO: Remove @ts-nocheck and add proper types: - * - Define interfaces for request body types (ConfigBody, CleanBody, InitBody, etc.) - * - Type error catches: (e: unknown) => { const err = e as Error; ... } - * - Add null checks for extractJSON results - * - Type the handlePostRequest callback body parameter + * Handles all CodexLens-related API endpoints. 
*/ -// @ts-nocheck -import type { IncomingMessage, ServerResponse } from 'http'; -import { - checkVenvStatus, - bootstrapVenv, - executeCodexLens, - checkSemanticStatus, - ensureLiteLLMEmbedderReady, - installSemantic, - detectGpuSupport, - uninstallCodexLens, - cancelIndexing, - isIndexingInProgress, - getVenvPythonPath -} from '../../tools/codex-lens.js'; -import type { ProgressInfo, GpuMode } from '../../tools/codex-lens.js'; -import { loadLiteLLMApiConfig } from '../../config/litellm-api-config-manager.js'; -import { spawn, ChildProcess } from 'child_process'; -import * as fs from 'fs'; -import * as path from 'path'; -import * as os from 'os'; -// ============================================================ -// WATCHER PERSISTENCE CONFIGURATION -// ============================================================ - -interface WatcherConfig { - enabled: boolean; - debounce_ms: number; -} - -interface PendingQueueStatus { - file_count: number; - files: string[]; - countdown_seconds: number; - last_event_time: number | null; -} - -interface IndexResultDetail { - files_indexed: number; - files_removed: number; - symbols_added: number; - symbols_removed: number; - files_success: string[]; - files_failed: string[]; - errors: string[]; - timestamp: number; -} - -interface WatcherStats { - running: boolean; - root_path: string; - events_processed: number; - start_time: Date | null; - pending_queue: PendingQueueStatus | null; - last_index_result: IndexResultDetail | null; - index_history: IndexResultDetail[]; -} - -interface ActiveWatcher { - process: ChildProcess; - stats: WatcherStats; -} - -// Configuration file path: ~/.codexlens/watchers.json -const WATCHER_CONFIG_DIR = path.join(os.homedir(), '.codexlens'); -const WATCHER_CONFIG_FILE = path.join(WATCHER_CONFIG_DIR, 'watchers.json'); - -// Active watchers Map: normalized_path -> { process, stats } -const activeWatchers = new Map(); - -// Flag to ensure watchers are initialized only once -let watchersInitialized = false; 
- -/** - * Normalize path for consistent key usage - * - Convert to absolute path - * - Convert to lowercase on Windows - * - Use forward slashes - */ -function normalizePath(inputPath: string): string { - const resolved = path.resolve(inputPath); - // Use lowercase on Windows for case-insensitive comparison - return process.platform === 'win32' ? resolved.toLowerCase() : resolved; -} - -/** - * Read watcher configuration from ~/.codexlens/watchers.json - * Returns empty object if file doesn't exist or has errors - */ -function readWatcherConfig(): Record { - try { - if (!fs.existsSync(WATCHER_CONFIG_FILE)) { - return {}; - } - const content = fs.readFileSync(WATCHER_CONFIG_FILE, 'utf-8'); - return JSON.parse(content); - } catch (err) { - console.warn('[CodexLens] Failed to read watcher config:', err); - return {}; - } -} - -/** - * Write watcher configuration to ~/.codexlens/watchers.json - * Creates directory if it doesn't exist - */ -function writeWatcherConfig(config: Record): void { - try { - // Ensure config directory exists - if (!fs.existsSync(WATCHER_CONFIG_DIR)) { - fs.mkdirSync(WATCHER_CONFIG_DIR, { recursive: true }); - } - fs.writeFileSync(WATCHER_CONFIG_FILE, JSON.stringify(config, null, 2), 'utf-8'); - } catch (err) { - console.error('[CodexLens] Failed to write watcher config:', err); - throw err; - } -} - -// ============================================================ - -// ============================================================ -// PROCESS MANAGEMENT FUNCTIONS -// ============================================================ - -/** - * Start watcher process for the given path - * Creates process, registers handlers, and updates activeWatchers Map - */ -async function startWatcherProcess( - targetPath: string, - debounce_ms: number, - broadcastToClients: (data: unknown) => void -): Promise<{ success: boolean; error?: string; pid?: number }> { - const normalizedPath = normalizePath(targetPath); - - // Check if watcher already running for this 
path - if (activeWatchers.has(normalizedPath)) { - return { success: false, error: 'Watcher already running for this path' }; - } - - try { - const { existsSync, statSync } = await import('fs'); - - // Validate path exists and is a directory - if (!existsSync(targetPath)) { - return { success: false, error: `Path does not exist: ${targetPath}` }; - } - const pathStat = statSync(targetPath); - if (!pathStat.isDirectory()) { - return { success: false, error: `Path is not a directory: ${targetPath}` }; - } - - // Get the codexlens CLI path - const venvStatus = await checkVenvStatus(); - if (!venvStatus.ready) { - return { success: false, error: 'CodexLens not installed' }; - } - - // Verify directory is indexed before starting watcher - try { - const statusResult = await executeCodexLens(['projects', 'list', '--json']); - if (statusResult.success && statusResult.stdout) { - const parsed = extractJSON(statusResult.stdout); - const projects = parsed.result || parsed || []; - const normalizedTarget = targetPath.toLowerCase().replace(/\\/g, '/'); - const isIndexed = Array.isArray(projects) && projects.some((p: { source_root: string }) => - p.source_root && p.source_root.toLowerCase().replace(/\\/g, '/') === normalizedTarget - ); - if (!isIndexed) { - return { - success: false, - error: `Directory is not indexed: ${targetPath}. 
Run 'codexlens init' first.` - }; - } - } - } catch (err) { - console.warn('[CodexLens] Could not verify index status:', err); - // Continue anyway - watcher will fail with proper error if not indexed - } - - // Spawn watch process using Python (no shell: true for security) - const pythonPath = getVenvPythonPath(); - const args = ['-m', 'codexlens', 'watch', targetPath, '--debounce', String(debounce_ms)]; - const childProcess = spawn(pythonPath, args, { - cwd: targetPath, - stdio: ['ignore', 'pipe', 'pipe'], - env: { ...process.env } - }); - - const stats: WatcherStats = { - running: true, - root_path: targetPath, - events_processed: 0, - start_time: new Date(), - pending_queue: null, - last_index_result: null, - index_history: [] - }; - - // Register in activeWatchers Map - activeWatchers.set(normalizedPath, { process: childProcess, stats }); - - // Capture stderr for error messages (capped at 4KB to prevent memory leak) - const MAX_STDERR_SIZE = 4096; - let stderrBuffer = ''; - if (childProcess.stderr) { - childProcess.stderr.on('data', (data: Buffer) => { - stderrBuffer += data.toString(); - if (stderrBuffer.length > MAX_STDERR_SIZE) { - stderrBuffer = stderrBuffer.slice(-MAX_STDERR_SIZE); - } - }); - } - - // Handle process output for JSON parsing and event counting - if (childProcess.stdout) { - childProcess.stdout.on('data', (data: Buffer) => { - const output = data.toString(); - const watcher = activeWatchers.get(normalizedPath); - if (!watcher) return; - - // Process output line by line for reliable JSON parsing - // (handles nested arrays/objects that simple regex can't match) - const lines = output.split('\n'); - let hasIndexResult = false; - - for (const line of lines) { - // Parse [QUEUE_STATUS] JSON - if (line.includes('[QUEUE_STATUS]')) { - const jsonStart = line.indexOf('{'); - if (jsonStart !== -1) { - try { - const queueStatus: PendingQueueStatus = JSON.parse(line.slice(jsonStart)); - watcher.stats.pending_queue = queueStatus; - 
broadcastToClients({ - type: 'CODEXLENS_WATCHER_QUEUE_UPDATE', - payload: { path: targetPath, queue: queueStatus } - }); - } catch (e) { - console.warn('[CodexLens] Failed to parse queue status:', e, line); - } - } - } - - // Parse [INDEX_RESULT] JSON - if (line.includes('[INDEX_RESULT]')) { - const jsonStart = line.indexOf('{'); - if (jsonStart !== -1) { - try { - const indexResult: IndexResultDetail = JSON.parse(line.slice(jsonStart)); - watcher.stats.last_index_result = indexResult; - watcher.stats.index_history.push(indexResult); - if (watcher.stats.index_history.length > 10) { - watcher.stats.index_history.shift(); - } - watcher.stats.events_processed += indexResult.files_indexed + indexResult.files_removed; - watcher.stats.pending_queue = null; - hasIndexResult = true; - - broadcastToClients({ - type: 'CODEXLENS_WATCHER_INDEX_COMPLETE', - payload: { path: targetPath, result: indexResult } - }); - } catch (e) { - console.warn('[CodexLens] Failed to parse index result:', e, line); - } - } - } - } - - // Legacy event counting (fallback) - const matches = output.match(/Processed \d+ events?/g); - if (matches && !hasIndexResult) { - watcher.stats.events_processed += matches.length; - } - }); - } - - // Handle spawn errors (e.g., ENOENT) - childProcess.on('error', (err: Error) => { - console.error(`[CodexLens] Watcher spawn error for ${targetPath}: ${err.message}`); - const watcher = activeWatchers.get(normalizedPath); - if (watcher) { - watcher.stats.running = false; - } - activeWatchers.delete(normalizedPath); - broadcastToClients({ - type: 'CODEXLENS_WATCHER_STATUS', - payload: { running: false, path: targetPath, error: `Spawn error: ${err.message}` } - }); - }); - - // Handle process exit - childProcess.on('exit', (code: number) => { - console.log(`[CodexLens] Watcher exited with code ${code} for ${targetPath}`); - const watcher = activeWatchers.get(normalizedPath); - if (watcher) { - watcher.stats.running = false; - } - activeWatchers.delete(normalizedPath); - 
- // Broadcast error if exited with non-zero code - if (code !== 0) { - const errorMsg = stderrBuffer.trim() || `Exited with code ${code}`; - const cleanError = stripAnsiCodes(errorMsg); - broadcastToClients({ - type: 'CODEXLENS_WATCHER_STATUS', - payload: { running: false, path: targetPath, error: cleanError } - }); - } else { - broadcastToClients({ - type: 'CODEXLENS_WATCHER_STATUS', - payload: { running: false, path: targetPath } - }); - } - }); - - // Broadcast watcher started - broadcastToClients({ - type: 'CODEXLENS_WATCHER_STATUS', - payload: { running: true, path: targetPath } - }); - - console.log(`[CodexLens] Watcher started for ${targetPath} (PID: ${childProcess.pid})`); - - return { - success: true, - pid: childProcess.pid - }; - } catch (err: any) { - return { success: false, error: err.message }; - } -} - -/** - * Stop watcher process for the given path - * Gracefully stops process, removes from activeWatchers Map - */ -async function stopWatcherProcess( - targetPath: string, - broadcastToClients: (data: unknown) => void -): Promise<{ success: boolean; error?: string; stats?: { events_processed: number; uptime_seconds: number } }> { - const normalizedPath = normalizePath(targetPath); - - const watcher = activeWatchers.get(normalizedPath); - if (!watcher || !watcher.stats.running) { - return { success: false, error: 'Watcher not running for this path' }; - } - - try { - // Send SIGTERM to gracefully stop the watcher - watcher.process.kill('SIGTERM'); - - // Wait a moment for graceful shutdown - await new Promise(resolve => setTimeout(resolve, 500)); - - // Force kill if still running - if (watcher.process && !watcher.process.killed) { - watcher.process.kill('SIGKILL'); - } - - const finalStats = { - events_processed: watcher.stats.events_processed, - uptime_seconds: watcher.stats.start_time - ? 
Math.floor((Date.now() - watcher.stats.start_time.getTime()) / 1000) - : 0 - }; - - // Update stats and remove from Map - watcher.stats.running = false; - watcher.stats.root_path = ''; - watcher.stats.events_processed = 0; - watcher.stats.start_time = null; - activeWatchers.delete(normalizedPath); - - // Broadcast watcher stopped - broadcastToClients({ - type: 'CODEXLENS_WATCHER_STATUS', - payload: { running: false, path: targetPath } - }); - - console.log(`[CodexLens] Watcher stopped for ${targetPath}`); - - return { - success: true, - stats: finalStats - }; - } catch (err: any) { - return { success: false, error: err.message }; - } -} - -// ============================================================ -// AUTO-RECOVERY ON SERVER START -// ============================================================ - -/** - * Initialize watchers from persisted configuration - * Called on server startup to restore watchers from ~/.codexlens/watchers.json - */ -async function initializeWatchers(broadcastToClients: (data: unknown) => void): Promise { - const config = readWatcherConfig(); - const enabledWatchers = Object.entries(config).filter(([_, cfg]) => cfg.enabled); - - if (enabledWatchers.length === 0) { - console.log('[CodexLens] No watchers to restore'); - return; - } - - console.log(`[CodexLens] Restoring ${enabledWatchers.length} watcher(s) from config...`); - - for (const [watchPath, cfg] of enabledWatchers) { - try { - const result = await startWatcherProcess(watchPath, cfg.debounce_ms, broadcastToClients); - if (result.success) { - console.log(`[CodexLens] Restored watcher for ${watchPath}`); - } else { - console.warn(`[CodexLens] Failed to restore watcher for ${watchPath}: ${result.error}`); - // Keep config entry but mark as disabled (will be re-enabled manually) - config[watchPath].enabled = false; - writeWatcherConfig(config); - } - } catch (err: any) { - console.error(`[CodexLens] Error restoring watcher for ${watchPath}:`, err.message); - } - } -} - -// LEGACY STATE 
(Deprecated - use activeWatchers Map instead) -// ============================================================ - - -// File watcher state (persisted across requests) -let watcherProcess: any = null; -let watcherStats = { - running: false, - root_path: '', - events_processed: 0, - start_time: null as Date | null -}; - -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; -} - -/** - * Strip ANSI color codes from string - * Rich library adds color codes even with --json flag - */ -function stripAnsiCodes(str: string): string { - // ANSI escape code pattern: \x1b[...m or \x1b]... - return str.replace(/\x1b\[[0-9;]*m/g, '') - .replace(/\x1b\][0-9;]*\x07/g, '') - .replace(/\x1b\][^\x07]*\x07/g, ''); -} - -/** - * Format file size to human readable string - */ -function formatSize(bytes: number): string { - if (bytes === 0) return '0 B'; - const units = ['B', 'KB', 'MB', 'GB', 'TB']; - const k = 1024; - const i = Math.floor(Math.log(bytes) / Math.log(k)); - const size = parseFloat((bytes / Math.pow(k, i)).toFixed(i < 2 ? 0 : 1)); - return size + ' ' + units[i]; -} - -/** - * Extract JSON from CLI output that may contain logging messages - * CodexLens CLI outputs logs like "INFO ..." before the JSON - * Also strips ANSI color codes that Rich library adds - * Handles trailing content after JSON (e.g., "INFO: Done" messages) - */ -function extractJSON(output: string): any { - // Strip ANSI color codes first - const cleanOutput = stripAnsiCodes(output); - - // Find the first { or [ character (start of JSON) - const jsonStart = cleanOutput.search(/[{\[]/); - if (jsonStart === -1) { - throw new Error('No JSON found in output'); - } - - const startChar = cleanOutput[jsonStart]; - const endChar = startChar === '{' ? 
'}' : ']'; - - // Find matching closing brace/bracket using a simple counter - let depth = 0; - let inString = false; - let escapeNext = false; - let jsonEnd = -1; - - for (let i = jsonStart; i < cleanOutput.length; i++) { - const char = cleanOutput[i]; - - if (escapeNext) { - escapeNext = false; - continue; - } - - if (char === '\\' && inString) { - escapeNext = true; - continue; - } - - if (char === '"') { - inString = !inString; - continue; - } - - if (!inString) { - if (char === startChar) { - depth++; - } else if (char === endChar) { - depth--; - if (depth === 0) { - jsonEnd = i + 1; - break; - } - } - } - } - - if (jsonEnd === -1) { - // Fallback: try to parse from start to end (original behavior) - const jsonString = cleanOutput.substring(jsonStart); - return JSON.parse(jsonString); - } - - const jsonString = cleanOutput.substring(jsonStart, jsonEnd); - return JSON.parse(jsonString); -} +import type { RouteContext } from './types.js'; +import { handleCodexLensConfigRoutes } from './codexlens/config-handlers.js'; +import { handleCodexLensIndexRoutes } from './codexlens/index-handlers.js'; +import { handleCodexLensSemanticRoutes } from './codexlens/semantic-handlers.js'; +import { handleCodexLensWatcherRoutes } from './codexlens/watcher-handlers.js'; /** * Handle CodexLens routes * @returns true if route was handled, false otherwise */ export async function handleCodexLensRoutes(ctx: RouteContext): Promise { - const { pathname, url, req, res, initialPath, handlePostRequest, broadcastToClients } = ctx; - - // API: CodexLens Index List - Get all indexed projects with details - - // Initialize watchers on first request (restore from config) - if (!watchersInitialized) { - watchersInitialized = true; - // Run async initialization without blocking the request - initializeWatchers(broadcastToClients).catch(err => { - console.error('[CodexLens] Failed to initialize watchers:', err); - }); - } - - if (pathname === '/api/codexlens/indexes') { - try { - // Check if 
CodexLens is installed first (without auto-installing) - const venvStatus = await checkVenvStatus(); - if (!venvStatus.ready) { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: true, indexes: [], totalSize: 0, totalSizeFormatted: '0 B' })); - return true; - } - - // Execute all CLI commands in parallel - const [configResult, projectsResult, statusResult] = await Promise.all([ - executeCodexLens(['config', '--json']), - executeCodexLens(['projects', 'list', '--json']), - executeCodexLens(['status', '--json']) - ]); - - let indexDir = ''; - if (configResult.success) { - try { - const config = extractJSON(configResult.output); - if (config.success && config.result) { - // CLI returns index_dir (not index_root) - indexDir = config.result.index_dir || config.result.index_root || ''; - } - } catch (e) { - console.error('[CodexLens] Failed to parse config for index list:', e.message); - } - } - - let indexes: any[] = []; - let totalSize = 0; - let vectorIndexCount = 0; - let normalIndexCount = 0; - - if (projectsResult.success) { - try { - const projectsData = extractJSON(projectsResult.output); - if (projectsData.success && Array.isArray(projectsData.result)) { - const { stat, readdir } = await import('fs/promises'); - const { existsSync } = await import('fs'); - const { basename, join } = await import('path'); - - for (const project of projectsData.result) { - // Skip test/temp projects - if (project.source_root && ( - project.source_root.includes('\\Temp\\') || - project.source_root.includes('/tmp/') || - project.total_files === 0 - )) { - continue; - } - - let projectSize = 0; - let hasVectorIndex = false; - let hasNormalIndex = true; // All projects have FTS index - let lastModified = null; - - // Try to get actual index size from index_root - if (project.index_root && existsSync(project.index_root)) { - try { - const files = await readdir(project.index_root); - for (const file of files) { - try { - const filePath = 
join(project.index_root, file); - const fileStat = await stat(filePath); - projectSize += fileStat.size; - if (!lastModified || fileStat.mtime > lastModified) { - lastModified = fileStat.mtime; - } - // Check for vector/embedding files - if (file.includes('vector') || file.includes('embedding') || - file.endsWith('.faiss') || file.endsWith('.npy') || - file.includes('semantic_chunks')) { - hasVectorIndex = true; - } - } catch (e) { - // Skip files we can't stat - } - } - } catch (e) { - // Can't read index directory - } - } - - if (hasVectorIndex) vectorIndexCount++; - if (hasNormalIndex) normalIndexCount++; - totalSize += projectSize; - - // Use source_root as the display name - const displayName = project.source_root ? basename(project.source_root) : `project_${project.id}`; - - indexes.push({ - id: displayName, - path: project.source_root || '', - indexPath: project.index_root || '', - size: projectSize, - sizeFormatted: formatSize(projectSize), - fileCount: project.total_files || 0, - dirCount: project.total_dirs || 0, - hasVectorIndex, - hasNormalIndex, - status: project.status || 'active', - lastModified: lastModified ? 
lastModified.toISOString() : null - }); - } - - // Sort by file count (most files first), then by name - indexes.sort((a, b) => { - if (b.fileCount !== a.fileCount) return b.fileCount - a.fileCount; - return a.id.localeCompare(b.id); - }); - } - } catch (e) { - console.error('[CodexLens] Failed to parse projects list:', e.message); - } - } - - // Parse summary stats from status command (already fetched in parallel) - let statusSummary: any = {}; - - if (statusResult.success) { - try { - const status = extractJSON(statusResult.output); - if (status.success && status.result) { - statusSummary = { - totalProjects: status.result.projects_count || indexes.length, - totalFiles: status.result.total_files || 0, - totalDirs: status.result.total_dirs || 0, - // Keep calculated totalSize for consistency with per-project sizes - // status.index_size_bytes includes shared resources (models, cache) - indexSizeBytes: totalSize, - indexSizeMb: totalSize / (1024 * 1024), - embeddings: status.result.embeddings || {}, - // Store full index dir size separately for reference - fullIndexDirSize: status.result.index_size_bytes || 0, - fullIndexDirSizeFormatted: formatSize(status.result.index_size_bytes || 0) - }; - } - } catch (e) { - console.error('[CodexLens] Failed to parse status:', e.message); - } - } - - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - success: true, - indexDir, - indexes, - summary: { - totalProjects: indexes.length, - totalSize, - totalSizeFormatted: formatSize(totalSize), - vectorIndexCount, - normalIndexCount, - ...statusSummary - } - })); - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - // API: CodexLens Status - if (pathname === '/api/codexlens/status') { - const status = await checkVenvStatus(); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(status)); - 
return true; - } - - // API: CodexLens Dashboard Init - Aggregated endpoint for page initialization - if (pathname === '/api/codexlens/dashboard-init') { - try { - const venvStatus = await checkVenvStatus(); - - if (!venvStatus.ready) { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - installed: false, - status: venvStatus, - config: { index_dir: '~/.codexlens/indexes', index_count: 0 }, - semantic: { available: false } - })); - return true; - } - - // Parallel fetch all initialization data - const [configResult, statusResult, semanticStatus] = await Promise.all([ - executeCodexLens(['config', '--json']), - executeCodexLens(['status', '--json']), - checkSemanticStatus() - ]); - - // Parse config - let config = { index_dir: '~/.codexlens/indexes', index_count: 0 }; - if (configResult.success) { - try { - const configData = extractJSON(configResult.output); - if (configData.success && configData.result) { - config.index_dir = configData.result.index_dir || configData.result.index_root || config.index_dir; - } - } catch (e) { - console.error('[CodexLens] Failed to parse config for dashboard init:', e.message); - } - } - - // Parse status - let statusData: any = {}; - if (statusResult.success) { - try { - const status = extractJSON(statusResult.output); - if (status.success && status.result) { - config.index_count = status.result.projects_count || 0; - statusData = status.result; - } - } catch (e) { - console.error('[CodexLens] Failed to parse status for dashboard init:', e.message); - } - } - - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - installed: true, - status: venvStatus, - config, - semantic: semanticStatus, - statusData - })); - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - // API: CodexLens Bootstrap (Install) - if (pathname === 
'/api/codexlens/bootstrap' && req.method === 'POST') { - handlePostRequest(req, res, async () => { - try { - const result = await bootstrapVenv(); - if (result.success) { - const status = await checkVenvStatus(); - // Broadcast installation event - broadcastToClients({ - type: 'CODEXLENS_INSTALLED', - payload: { version: status.version, timestamp: new Date().toISOString() } - }); - return { success: true, message: 'CodexLens installed successfully', version: status.version }; - } else { - return { success: false, error: result.error, status: 500 }; - } - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: CodexLens Uninstall - if (pathname === '/api/codexlens/uninstall' && req.method === 'POST') { - handlePostRequest(req, res, async () => { - try { - // Stop watcher if running (to release file handles) - if (watcherStats.running && watcherProcess) { - console.log('[CodexLens] Stopping watcher before uninstall...'); - try { - watcherProcess.kill('SIGTERM'); - await new Promise(resolve => setTimeout(resolve, 500)); - if (watcherProcess && !watcherProcess.killed) { - watcherProcess.kill('SIGKILL'); - } - } catch { - // Ignore errors stopping watcher - } - watcherStats.running = false; - watcherProcess = null; - } - - // Cancel any running indexing process using exported function - if (isIndexingInProgress()) { - console.log('[CodexLens] Cancelling indexing before uninstall...'); - try { - cancelIndexing(); - } catch { - // Ignore errors - } - } - - // Wait a moment for processes to fully exit and release handles - await new Promise(resolve => setTimeout(resolve, 1000)); - - const result = await uninstallCodexLens(); - if (result.success) { - // Broadcast uninstallation event - broadcastToClients({ - type: 'CODEXLENS_UNINSTALLED', - payload: { timestamp: new Date().toISOString() } - }); - return { success: true, message: 'CodexLens uninstalled successfully' }; - } else { - return { success: false, 
error: result.error, status: 500 }; - } - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: CodexLens Config - GET (Get current configuration with index count) - if (pathname === '/api/codexlens/config' && req.method === 'GET') { - try { - // Check if CodexLens is installed first (without auto-installing) - const venvStatus = await checkVenvStatus(); - - let responseData = { index_dir: '~/.codexlens/indexes', index_count: 0, api_max_workers: 4, api_batch_size: 8 }; - - // If not installed, return default config without executing CodexLens - if (!venvStatus.ready) { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(responseData)); - return true; - } - - // Fetch both config and status to merge index_count - const [configResult, statusResult] = await Promise.all([ - executeCodexLens(['config', '--json']), - executeCodexLens(['status', '--json']) - ]); - - // Parse config (extract JSON from output that may contain log messages) - if (configResult.success) { - try { - const config = extractJSON(configResult.output); - if (config.success && config.result) { - // CLI returns index_dir (not index_root) - responseData.index_dir = config.result.index_dir || config.result.index_root || responseData.index_dir; - // Extract API settings - if (config.result.api_max_workers !== undefined) { - responseData.api_max_workers = config.result.api_max_workers; - } - if (config.result.api_batch_size !== undefined) { - responseData.api_batch_size = config.result.api_batch_size; - } - } - } catch (e) { - console.error('[CodexLens] Failed to parse config:', e.message); - console.error('[CodexLens] Config output:', configResult.output.substring(0, 200)); - } - } - - // Parse status to get index_count (projects_count) - if (statusResult.success) { - try { - const status = extractJSON(statusResult.output); - if (status.success && status.result) { - responseData.index_count = 
status.result.projects_count || 0; - } - } catch (e) { - console.error('[CodexLens] Failed to parse status:', e.message); - console.error('[CodexLens] Status output:', statusResult.output.substring(0, 200)); - } - } - - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(responseData)); - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ error: err.message })); - } - return true; - } - - // API: CodexLens Config - POST (Set configuration) - if (pathname === '/api/codexlens/config' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { index_dir, api_max_workers, api_batch_size } = body; - - if (!index_dir) { - return { success: false, error: 'index_dir is required', status: 400 }; - } - - // Validate index_dir path - const indexDirStr = String(index_dir).trim(); - - // Check for dangerous patterns - if (indexDirStr.includes('\0')) { - return { success: false, error: 'Invalid path: contains null bytes', status: 400 }; - } - - // Prevent system root paths and their subdirectories (Windows and Unix) - const dangerousPaths = ['/', 'C:\\', 'C:/', '/etc', '/usr', '/bin', '/sys', '/proc', '/var', - 'C:\\Windows', 'C:\\Program Files', 'C:\\Program Files (x86)', 'C:\\System32']; - const normalizedPath = indexDirStr.replace(/\\/g, '/').toLowerCase(); - for (const dangerous of dangerousPaths) { - const dangerousLower = dangerous.replace(/\\/g, '/').toLowerCase(); - // Block exact match OR any subdirectory (using startsWith) - if (normalizedPath === dangerousLower || - normalizedPath === dangerousLower + '/' || - normalizedPath.startsWith(dangerousLower + '/')) { - return { success: false, error: 'Invalid path: cannot use system directories or their subdirectories', status: 400 }; - } - } - - // Additional check: prevent path traversal attempts - if (normalizedPath.includes('../') || normalizedPath.includes('/..')) { - return { success: false, error: 'Invalid 
path: path traversal not allowed', status: 400 }; - } - - // Validate api settings - if (api_max_workers !== undefined) { - const workers = Number(api_max_workers); - if (isNaN(workers) || workers < 1 || workers > 32) { - return { success: false, error: 'api_max_workers must be between 1 and 32', status: 400 }; - } - } - if (api_batch_size !== undefined) { - const batch = Number(api_batch_size); - if (isNaN(batch) || batch < 1 || batch > 64) { - return { success: false, error: 'api_batch_size must be between 1 and 64', status: 400 }; - } - } - - try { - // Set index_dir - const result = await executeCodexLens(['config', 'set', 'index_dir', indexDirStr, '--json']); - if (!result.success) { - return { success: false, error: result.error || 'Failed to update index_dir', status: 500 }; - } - - // Set API settings if provided - if (api_max_workers !== undefined) { - await executeCodexLens(['config', 'set', 'api_max_workers', String(api_max_workers), '--json']); - } - if (api_batch_size !== undefined) { - await executeCodexLens(['config', 'set', 'api_batch_size', String(api_batch_size), '--json']); - } - - return { success: true, message: 'Configuration updated successfully' }; - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: CodexLens Clean (Clean indexes) - if (pathname === '/api/codexlens/clean' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { all = false, path } = body; - - try { - const args = ['clean']; - if (all) { - args.push('--all'); - } else if (path) { - // Path is passed as a positional argument, not as a flag - args.push(path); - } - args.push('--json'); - - const result = await executeCodexLens(args); - if (result.success) { - return { success: true, message: 'Indexes cleaned successfully' }; - } else { - return { success: false, error: result.error || 'Failed to clean indexes', status: 500 }; - } - } catch (err) { - return { success: false, error: 
err.message, status: 500 }; - } - }); - return true; - } - - // API: CodexLens Init (Initialize workspace index) - if (pathname === '/api/codexlens/init' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { path: projectPath, indexType = 'vector', embeddingModel = 'code', embeddingBackend = 'fastembed', maxWorkers = 1, incremental = true } = body; - const targetPath = projectPath || initialPath; - - // Ensure LiteLLM backend dependencies are installed before running the CLI - if (indexType !== 'normal' && embeddingBackend === 'litellm') { - const installResult = await ensureLiteLLMEmbedderReady(); - if (!installResult.success) { - return { success: false, error: installResult.error || 'Failed to prepare LiteLLM embedder', status: 500 }; - } - } - - // Build CLI arguments based on index type - // Use 'index init' subcommand (new CLI structure) - const args = ['index', 'init', targetPath, '--json']; - - // Force mode: when incremental=false, add --force to rebuild all files - // CLI defaults to incremental mode (skip unchanged files) - if (!incremental) { - args.push('--force'); - } - - if (indexType === 'normal') { - args.push('--no-embeddings'); - } else { - // Add embedding model selection for vector index (use --model, not --embedding-model) - args.push('--model', embeddingModel); - // Add embedding backend if not using default fastembed (use --backend, not --embedding-backend) - if (embeddingBackend && embeddingBackend !== 'fastembed') { - args.push('--backend', embeddingBackend); - } - // Add max workers for concurrent API calls (useful for litellm backend) - if (maxWorkers && maxWorkers > 1) { - args.push('--max-workers', String(maxWorkers)); - } - } - - // Broadcast start event - broadcastToClients({ - type: 'CODEXLENS_INDEX_PROGRESS', - payload: { stage: 'start', message: 'Starting index...', percent: 0, path: targetPath, indexType } - }); - - try { - const result = await executeCodexLens(args, { - cwd: targetPath, - timeout: 
1800000, // 30 minutes for large codebases - onProgress: (progress: ProgressInfo) => { - // Broadcast progress to all connected clients - broadcastToClients({ - type: 'CODEXLENS_INDEX_PROGRESS', - payload: { ...progress, path: targetPath } - }); - } - }); - - if (result.success) { - // Broadcast completion - broadcastToClients({ - type: 'CODEXLENS_INDEX_PROGRESS', - payload: { stage: 'complete', message: 'Index complete', percent: 100, path: targetPath } - }); - - try { - const parsed = extractJSON(result.output); - return { success: true, result: parsed }; - } catch { - return { success: true, output: result.output }; - } - } else { - // Broadcast error - broadcastToClients({ - type: 'CODEXLENS_INDEX_PROGRESS', - payload: { stage: 'error', message: result.error || 'Unknown error', percent: 0, path: targetPath } - }); - return { success: false, error: result.error, status: 500 }; - } - } catch (err) { - // Broadcast error - broadcastToClients({ - type: 'CODEXLENS_INDEX_PROGRESS', - payload: { stage: 'error', message: err.message, percent: 0, path: targetPath } - }); - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: Cancel CodexLens Indexing - if (pathname === '/api/codexlens/cancel' && req.method === 'POST') { - const result = cancelIndexing(); - - // Broadcast cancellation event - if (result.success) { - broadcastToClients({ - type: 'CODEXLENS_INDEX_PROGRESS', - payload: { stage: 'cancelled', message: 'Indexing cancelled by user', percent: 0 } - }); - } - - res.writeHead(result.success ? 
200 : 400, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(result)); - return true; - } - - // API: Check if indexing is in progress - if (pathname === '/api/codexlens/indexing-status') { - const inProgress = isIndexingInProgress(); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: true, inProgress })); - return true; - } - - // API: Generate embeddings only (without FTS rebuild) - if (pathname === '/api/codexlens/embeddings/generate' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { path: projectPath, incremental = false, backend = 'litellm', maxWorkers = 4, model } = body; - const targetPath = projectPath || initialPath; - - // Ensure LiteLLM backend dependencies are installed - if (backend === 'litellm') { - try { - await ensureLiteLLMEmbedderReady(); - } catch (err) { - return { success: false, error: `LiteLLM embedder setup failed: ${err.message}` }; - } - } - - // Build CLI arguments for embeddings generation - // Use 'index embeddings' subcommand - const args = ['index', 'embeddings', targetPath, '--json']; - - // Add backend option - if (backend && backend !== 'fastembed') { - args.push('--backend', backend); - } - - // Add model if specified - if (model) { - args.push('--model', model); - } - - // Add max workers for API backend - if (backend === 'litellm' && maxWorkers > 1) { - args.push('--max-workers', String(maxWorkers)); - } - - // Force mode: always use --force for litellm backend to avoid model conflict - // (litellm uses different embeddings than fastembed, so regeneration is required) - // For true incremental updates with same model, use fastembed backend - if (!incremental || backend === 'litellm') { - args.push('--force'); // Force regenerate embeddings - } - - try { - // Broadcast progress start - broadcastToClients({ - type: 'CODEXLENS_INDEX_PROGRESS', - payload: { stage: 'embeddings', message: 'Generating embeddings...', percent: 10 } - }); 
- - const result = await executeCodexLens(args, { - cwd: targetPath, - onProgress: (progress: ProgressInfo) => { - broadcastToClients({ - type: 'CODEXLENS_INDEX_PROGRESS', - payload: { - stage: 'embeddings', - message: progress.message || 'Processing...', - percent: progress.percent || 50 - } - }); - } - }); - - if (result.success) { - broadcastToClients({ - type: 'CODEXLENS_INDEX_PROGRESS', - payload: { stage: 'complete', message: 'Embeddings generated', percent: 100 } - }); - - try { - const parsed = extractJSON(result.output || '{}'); - return { success: true, result: parsed }; - } catch { - return { success: true, result: { message: 'Embeddings generated successfully' } }; - } - } else { - broadcastToClients({ - type: 'CODEXLENS_INDEX_PROGRESS', - payload: { stage: 'error', message: result.error || 'Failed', percent: 0 } - }); - return { success: false, error: result.error }; - } - } catch (err) { - broadcastToClients({ - type: 'CODEXLENS_INDEX_PROGRESS', - payload: { stage: 'error', message: err.message, percent: 0 } - }); - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: CodexLens Semantic Search Status - if (pathname === '/api/codexlens/semantic/status') { - const status = await checkSemanticStatus(); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(status)); - return true; - } - - // API: CodexLens Semantic Metadata List - if (pathname === '/api/codexlens/semantic/metadata') { - const offset = parseInt(url.searchParams.get('offset') || '0', 10); - const limit = parseInt(url.searchParams.get('limit') || '50', 10); - const tool = url.searchParams.get('tool') || ''; - const projectPath = url.searchParams.get('path') || initialPath; - - try { - const args = [ - 'semantic-list', - '--path', projectPath, - '--offset', offset.toString(), - '--limit', limit.toString(), - '--json' - ]; - if (tool) { - args.push('--tool', tool); - } - - const result = await 
executeCodexLens(args, { cwd: projectPath }); - - if (result.success) { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(result.output); - } else { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: result.error })); - } - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - // API: CodexLens LLM Enhancement (run enhance command) - if (pathname === '/api/codexlens/enhance' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { path: projectPath, tool = 'gemini', batchSize = 5, timeoutMs = 300000 } = body; - const targetPath = projectPath || initialPath; - - try { - const args = ['enhance', targetPath, '--tool', tool, '--batch-size', batchSize.toString()]; - const result = await executeCodexLens(args, { cwd: targetPath, timeout: timeoutMs + 30000 }); - if (result.success) { - try { - const parsed = extractJSON(result.output); - return { success: true, result: parsed }; - } catch { - return { success: true, output: result.output }; - } - } else { - return { success: false, error: result.error, status: 500 }; - } - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - - // API: CodexLens Search (FTS5 text search with mode support) - if (pathname === '/api/codexlens/search') { - const query = url.searchParams.get('query') || ''; - const limit = parseInt(url.searchParams.get('limit') || '20', 10); - const mode = url.searchParams.get('mode') || 'exact'; // exact, fuzzy, hybrid, vector - const maxContentLength = parseInt(url.searchParams.get('max_content_length') || '200', 10); - const extraFilesCount = parseInt(url.searchParams.get('extra_files_count') || '10', 10); - const projectPath = url.searchParams.get('path') || initialPath; - - if (!query) { - res.writeHead(400, { 
'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: 'Query parameter is required' })); - return true; - } - - try { - // Request more results to support split (full content + extra files) - const totalToFetch = limit + extraFilesCount; - // Use --method instead of deprecated --mode - const args = ['search', query, '--path', projectPath, '--limit', totalToFetch.toString(), '--method', mode, '--json']; - - const result = await executeCodexLens(args, { cwd: projectPath }); - - if (result.success) { - try { - const parsed = extractJSON(result.output); - const allResults = parsed.result?.results || []; - - // Truncate content and split results - const truncateContent = (content: string | null | undefined): string => { - if (!content) return ''; - if (content.length <= maxContentLength) return content; - return content.slice(0, maxContentLength) + '...'; - }; - - // Split results: first N with full content, rest as file paths only - const resultsWithContent = allResults.slice(0, limit).map((r: any) => ({ - ...r, - content: truncateContent(r.content || r.excerpt), - excerpt: truncateContent(r.excerpt || r.content), - })); - - const extraResults = allResults.slice(limit, limit + extraFilesCount); - const extraFiles = [...new Set(extraResults.map((r: any) => r.path || r.file))]; - - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - success: true, - results: resultsWithContent, - extra_files: extraFiles.length > 0 ? 
extraFiles : undefined, - metadata: { - total: allResults.length, - limit, - max_content_length: maxContentLength, - extra_files_count: extraFilesCount, - }, - })); - } catch { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: true, results: [], output: result.output })); - } - } else { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: result.error })); - } - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - // API: CodexLens Search Files Only (return file paths only, with mode support) - if (pathname === '/api/codexlens/search_files') { - const query = url.searchParams.get('query') || ''; - const limit = parseInt(url.searchParams.get('limit') || '20', 10); - const mode = url.searchParams.get('mode') || 'exact'; // exact, fuzzy, hybrid, vector - const projectPath = url.searchParams.get('path') || initialPath; - - if (!query) { - res.writeHead(400, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: 'Query parameter is required' })); - return true; - } - - try { - // Use --method instead of deprecated --mode - const args = ['search', query, '--path', projectPath, '--limit', limit.toString(), '--method', mode, '--files-only', '--json']; - - const result = await executeCodexLens(args, { cwd: projectPath }); - - if (result.success) { - try { - const parsed = extractJSON(result.output); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: true, ...parsed.result })); - } catch { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: true, files: [], output: result.output })); - } - } else { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: result.error })); 
- } - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - // API: CodexLens Symbol Search (search for symbols by name) - if (pathname === '/api/codexlens/symbol') { - const query = url.searchParams.get('query') || ''; - const file = url.searchParams.get('file'); - const limit = parseInt(url.searchParams.get('limit') || '20', 10); - const projectPath = url.searchParams.get('path') || initialPath; - - if (!query && !file) { - res.writeHead(400, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: 'Either query or file parameter is required' })); - return true; - } - - try { - let args; - if (file) { - // Get symbols from a specific file - args = ['symbol', '--file', file, '--json']; - } else { - // Search for symbols by name - args = ['symbol', query, '--path', projectPath, '--limit', limit.toString(), '--json']; - } - - const result = await executeCodexLens(args, { cwd: projectPath }); - - if (result.success) { - try { - const parsed = extractJSON(result.output); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: true, ...parsed.result })); - } catch { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: true, symbols: [], output: result.output })); - } - } else { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: result.error })); - } - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - - // API: Detect GPU support for semantic search - if (pathname === '/api/codexlens/gpu/detect' && req.method === 'GET') { - try { - const gpuInfo = await detectGpuSupport(); - res.writeHead(200, { 'Content-Type': 'application/json' }); - 
res.end(JSON.stringify({ success: true, ...gpuInfo })); - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - // API: List available GPU devices for selection - if (pathname === '/api/codexlens/gpu/list' && req.method === 'GET') { - try { - // Try CodexLens gpu-list first if available - const venvStatus = await checkVenvStatus(); - if (venvStatus.ready) { - const result = await executeCodexLens(['gpu-list', '--json']); - if (result.success) { - try { - const parsed = extractJSON(result.output); - if (parsed.devices && parsed.devices.length > 0) { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(parsed)); - return true; - } - } catch { - // Fall through to system detection - } - } - } - - // Fallback: Use system commands to detect GPUs - const devices: Array<{ name: string; type: string; index: number }> = []; - - if (process.platform === 'win32') { - // Windows: Use WMIC to get GPU info - try { - const { execSync } = await import('child_process'); - const wmicOutput = execSync('wmic path win32_VideoController get name', { - encoding: 'utf-8', - timeout: 10000, - stdio: ['pipe', 'pipe', 'pipe'] - }); - - const lines = wmicOutput.split('\n') - .map(line => line.trim()) - .filter(line => line && line !== 'Name'); - - lines.forEach((name, index) => { - if (name) { - const isIntegrated = name.toLowerCase().includes('intel') || - name.toLowerCase().includes('integrated'); - devices.push({ - name: name, - type: isIntegrated ? 
'integrated' : 'discrete', - index: index - }); - } - }); - } catch (e) { - console.warn('[CodexLens] WMIC GPU detection failed:', (e as Error).message); - } - } else { - // Linux/Mac: Try nvidia-smi for NVIDIA GPUs - try { - const { execSync } = await import('child_process'); - const nvidiaOutput = execSync('nvidia-smi --query-gpu=name --format=csv,noheader', { - encoding: 'utf-8', - timeout: 10000, - stdio: ['pipe', 'pipe', 'pipe'] - }); - - const lines = nvidiaOutput.split('\n').filter(line => line.trim()); - lines.forEach((name, index) => { - devices.push({ - name: name.trim(), - type: 'discrete', - index: index - }); - }); - } catch { - // NVIDIA not available, that's fine - } - } - - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: true, devices: devices, selected_device_id: null })); - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - // API: Select GPU device for embedding - if (pathname === '/api/codexlens/gpu/select' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { device_id } = body; - - if (device_id === undefined || device_id === null) { - return { success: false, error: 'device_id is required', status: 400 }; - } - - try { - const result = await executeCodexLens(['gpu-select', String(device_id), '--json']); - if (result.success) { - try { - const parsed = extractJSON(result.output); - return parsed; - } catch { - return { success: true, message: 'GPU selected', output: result.output }; - } - } else { - return { success: false, error: result.error, status: 500 }; - } - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: Reset GPU selection to auto-detection - if (pathname === '/api/codexlens/gpu/reset' && req.method === 'POST') { - handlePostRequest(req, res, async () => { - try { - 
const result = await executeCodexLens(['gpu-reset', '--json']); - if (result.success) { - try { - const parsed = extractJSON(result.output); - return parsed; - } catch { - return { success: true, message: 'GPU selection reset', output: result.output }; - } - } else { - return { success: false, error: result.error, status: 500 }; - } - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: CodexLens Semantic Search Install (with GPU mode support) - if (pathname === '/api/codexlens/semantic/install' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - try { - // Get GPU mode from request body, default to 'cpu' - const gpuMode: GpuMode = body?.gpuMode || 'cpu'; - const validModes: GpuMode[] = ['cpu', 'cuda', 'directml']; - - if (!validModes.includes(gpuMode)) { - return { success: false, error: `Invalid GPU mode: ${gpuMode}. Valid modes: ${validModes.join(', ')}`, status: 400 }; - } - - const result = await installSemantic(gpuMode); - if (result.success) { - const status = await checkSemanticStatus(); - const modeDescriptions = { - cpu: 'CPU (ONNX Runtime)', - cuda: 'NVIDIA CUDA GPU', - directml: 'Windows DirectML GPU' - }; - return { - success: true, - message: `Semantic search installed successfully with ${modeDescriptions[gpuMode]}`, - gpuMode, - ...status - }; - } else { - return { success: false, error: result.error, status: 500 }; - } - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: CodexLens Model List (list available embedding models) - if (pathname === '/api/codexlens/models' && req.method === 'GET') { - try { - // Check if CodexLens is installed first (without auto-installing) - const venvStatus = await checkVenvStatus(); - if (!venvStatus.ready) { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: 'CodexLens not installed' })); - return 
true; - } - const result = await executeCodexLens(['model-list', '--json']); - if (result.success) { - try { - const parsed = extractJSON(result.output); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(parsed)); - } catch { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: true, result: { models: [] }, output: result.output })); - } - } else { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: result.error })); - } - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - // API: CodexLens Model Download (download embedding model by profile) - if (pathname === '/api/codexlens/models/download' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { profile } = body; - - if (!profile) { - return { success: false, error: 'profile is required', status: 400 }; - } - - try { - const result = await executeCodexLens(['model-download', profile, '--json'], { timeout: 600000 }); // 10 min for download - if (result.success) { - try { - const parsed = extractJSON(result.output); - return { success: true, ...parsed }; - } catch { - return { success: true, output: result.output }; - } - } else { - return { success: false, error: result.error, status: 500 }; - } - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: CodexLens Model Delete (delete embedding model by profile) - if (pathname === '/api/codexlens/models/delete' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { profile } = body; - - if (!profile) { - return { success: false, error: 'profile is required', status: 400 }; - } - - try { - const result = await executeCodexLens(['model-delete', profile, '--json']); - if (result.success) 
{ - try { - const parsed = extractJSON(result.output); - return { success: true, ...parsed }; - } catch { - return { success: true, output: result.output }; - } - } else { - return { success: false, error: result.error, status: 500 }; - } - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: CodexLens Model Info (get model info by profile) - if (pathname === '/api/codexlens/models/info' && req.method === 'GET') { - const profile = url.searchParams.get('profile'); - - if (!profile) { - res.writeHead(400, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: 'profile parameter is required' })); - return true; - } - - try { - const result = await executeCodexLens(['model-info', profile, '--json']); - if (result.success) { - try { - const parsed = extractJSON(result.output); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(parsed)); - } catch { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: 'Failed to parse response' })); - } - } else { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: result.error })); - } - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - // ============================================================ - // RERANKER CONFIGURATION ENDPOINTS - // ============================================================ - - // API: Get Reranker Configuration - if (pathname === '/api/codexlens/reranker/config' && req.method === 'GET') { - try { - const venvStatus = await checkVenvStatus(); - - // Default reranker config (matches fastembed default) - const rerankerConfig = { - backend: 'fastembed', - model_name: 'Xenova/ms-marco-MiniLM-L-6-v2', - api_provider: 'siliconflow', - 
api_key_set: false, - available_backends: ['onnx', 'api', 'litellm', 'legacy'], - api_providers: ['siliconflow', 'cohere', 'jina'], - litellm_endpoints: [] as string[], - config_source: 'default' - }; - - // Load LiteLLM endpoints for dropdown - try { - const litellmConfig = loadLiteLLMApiConfig(initialPath); - if (litellmConfig.endpoints && Array.isArray(litellmConfig.endpoints)) { - rerankerConfig.litellm_endpoints = litellmConfig.endpoints.map( - (ep: any) => ep.alias || ep.name || ep.baseUrl - ).filter(Boolean); - } - } catch (e) { - // LiteLLM config not available, continue with empty endpoints - } - - // If CodexLens is installed, try to get actual config - if (venvStatus.ready) { - try { - const result = await executeCodexLens(['config', '--json']); - if (result.success) { - const config = extractJSON(result.output); - if (config.success && config.result) { - // Map config values - if (config.result.reranker_backend) { - rerankerConfig.backend = config.result.reranker_backend; - rerankerConfig.config_source = 'codexlens'; - } - if (config.result.reranker_model) { - rerankerConfig.model_name = config.result.reranker_model; - } - if (config.result.reranker_api_provider) { - rerankerConfig.api_provider = config.result.reranker_api_provider; - } - // Check if API key is set (from env) - if (process.env.RERANKER_API_KEY) { - rerankerConfig.api_key_set = true; - } - } - } - } catch (e) { - console.error('[CodexLens] Failed to get reranker config:', e); - } - } - - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: true, ...rerankerConfig })); - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - // API: Set Reranker Configuration - if (pathname === '/api/codexlens/reranker/config' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { backend, model_name, api_provider, 
api_key, litellm_endpoint } = body; - - // Validate backend - const validBackends = ['onnx', 'api', 'litellm', 'legacy', 'fastembed']; - if (backend && !validBackends.includes(backend)) { - return { success: false, error: `Invalid backend: ${backend}. Valid options: ${validBackends.join(', ')}`, status: 400 }; - } - - // Validate api_provider - const validProviders = ['siliconflow', 'cohere', 'jina']; - if (api_provider && !validProviders.includes(api_provider)) { - return { success: false, error: `Invalid api_provider: ${api_provider}. Valid options: ${validProviders.join(', ')}`, status: 400 }; - } - - try { - const updates: string[] = []; - - // Set backend - if (backend) { - const result = await executeCodexLens(['config', 'set', 'reranker_backend', backend, '--json']); - if (result.success) updates.push('backend'); - } - - // Set model - if (model_name) { - const result = await executeCodexLens(['config', 'set', 'reranker_model', model_name, '--json']); - if (result.success) updates.push('model_name'); - } - - // Set API provider - if (api_provider) { - const result = await executeCodexLens(['config', 'set', 'reranker_api_provider', api_provider, '--json']); - if (result.success) updates.push('api_provider'); - } - - // Set LiteLLM endpoint - if (litellm_endpoint) { - const result = await executeCodexLens(['config', 'set', 'reranker_litellm_endpoint', litellm_endpoint, '--json']); - if (result.success) updates.push('litellm_endpoint'); - } - - // Handle API key - write to .env file or environment - if (api_key) { - // For security, we store in process.env for the current session - // In production, this should be written to a secure .env file - process.env.RERANKER_API_KEY = api_key; - updates.push('api_key'); - } - - return { - success: true, - message: `Updated: ${updates.join(', ')}`, - updated_fields: updates - }; - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // 
============================================================ - // RERANKER MODEL MANAGEMENT ENDPOINTS - // ============================================================ - - // API: List Reranker Models (list available reranker models) - if (pathname === '/api/codexlens/reranker/models' && req.method === 'GET') { - try { - // Check if CodexLens is installed first - const venvStatus = await checkVenvStatus(); - if (!venvStatus.ready) { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: 'CodexLens not installed' })); - return true; - } - const result = await executeCodexLens(['reranker-model-list', '--json']); - if (result.success) { - try { - const parsed = extractJSON(result.output); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(parsed)); - } catch { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: true, result: { models: [] }, output: result.output })); - } - } else { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: result.error })); - } - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - // API: Download Reranker Model (download reranker model by profile) - if (pathname === '/api/codexlens/reranker/models/download' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { profile } = body; - - if (!profile) { - return { success: false, error: 'profile is required', status: 400 }; - } - - try { - const result = await executeCodexLens(['reranker-model-download', profile, '--json'], { timeout: 600000 }); // 10 min for download - if (result.success) { - try { - const parsed = extractJSON(result.output); - return { success: true, ...parsed }; - } catch { - return { success: true, output: result.output }; - } - } 
else { - return { success: false, error: result.error, status: 500 }; - } - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: Delete Reranker Model (delete reranker model by profile) - if (pathname === '/api/codexlens/reranker/models/delete' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { profile } = body; - - if (!profile) { - return { success: false, error: 'profile is required', status: 400 }; - } - - try { - const result = await executeCodexLens(['reranker-model-delete', profile, '--json']); - if (result.success) { - try { - const parsed = extractJSON(result.output); - return { success: true, ...parsed }; - } catch { - return { success: true, output: result.output }; - } - } else { - return { success: false, error: result.error, status: 500 }; - } - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: Reranker Model Info (get reranker model info by profile) - if (pathname === '/api/codexlens/reranker/models/info' && req.method === 'GET') { - const profile = url.searchParams.get('profile'); - - if (!profile) { - res.writeHead(400, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: 'profile parameter is required' })); - return true; - } - - try { - const result = await executeCodexLens(['reranker-model-info', profile, '--json']); - if (result.success) { - try { - const parsed = extractJSON(result.output); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(parsed)); - } catch { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: 'Failed to parse response' })); - } - } else { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: result.error })); - } - } catch (err) { - res.writeHead(500, { 
'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - // ============================================================ - // FILE WATCHER CONTROL ENDPOINTS - // ============================================================ - - // API: Get File Watcher Status - // API: Get File Watcher Status - // Supports ?path= query parameter for specific watcher - // Returns all watchers if no path specified - if (pathname === '/api/codexlens/watch/status') { - const queryPath = url.searchParams.get('path'); - - if (queryPath) { - // Return status for specific path - const normalizedPath = normalizePath(queryPath); - const watcher = activeWatchers.get(normalizedPath); - - if (watcher) { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - success: true, - running: watcher.stats.running, - root_path: watcher.stats.root_path, - events_processed: watcher.stats.events_processed, - start_time: watcher.stats.start_time?.toISOString() || null, - uptime_seconds: watcher.stats.start_time - ? Math.floor((Date.now() - watcher.stats.start_time.getTime()) / 1000) - : 0 - })); - } else { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - success: true, - running: false, - root_path: '', - events_processed: 0, - start_time: null, - uptime_seconds: 0 - })); - } - } else { - // Return all watchers - const watchers = Array.from(activeWatchers.entries()).map(([path, watcher]) => ({ - root_path: watcher.stats.root_path, - running: watcher.stats.running, - events_processed: watcher.stats.events_processed, - start_time: watcher.stats.start_time?.toISOString() || null, - uptime_seconds: watcher.stats.start_time - ? 
Math.floor((Date.now() - watcher.stats.start_time.getTime()) / 1000) - : 0 - })); - - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - success: true, - watchers, - count: watchers.length - })); - } - return true; - } - - // API: Start File Watcher - if (pathname === '/api/codexlens/watch/start' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { path: watchPath, debounce_ms = 1000 } = body; - const targetPath = watchPath || initialPath; - const normalizedPath = normalizePath(targetPath); - - // Check if watcher already running for this path - if (activeWatchers.has(normalizedPath)) { - return { success: false, error: 'Watcher already running for this path', status: 400 }; - } - - try { - // Start watcher process using new architecture - const result = await startWatcherProcess(targetPath, debounce_ms, broadcastToClients); - - if (!result.success) { - return { success: false, error: result.error, status: 400 }; - } - - // Persist to config file - const config = readWatcherConfig(); - config[normalizedPath] = { - enabled: true, - debounce_ms - }; - writeWatcherConfig(config); - - return { - success: true, - message: 'Watcher started and persisted to config', - path: targetPath, - pid: result.pid - }; - } catch (err: any) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - - // API: Stop File Watcher - if (pathname === '/api/codexlens/watch/stop' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { path: watchPath } = body; - const targetPath = watchPath || initialPath; - const normalizedPath = normalizePath(targetPath); - - // Check if watcher is running for this path - if (!activeWatchers.has(normalizedPath)) { - return { success: false, error: 'Watcher not running for this path', status: 400 }; - } - - try { - // Stop watcher process using new architecture - const result = await stopWatcherProcess(targetPath, 
broadcastToClients); - - if (!result.success) { - return { success: false, error: result.error, status: 400 }; - } - - // Update config file - disable watcher - const config = readWatcherConfig(); - if (config[normalizedPath]) { - config[normalizedPath].enabled = false; - writeWatcherConfig(config); - } - - return { - success: true, - message: 'Watcher stopped', - events_processed: result.stats?.events_processed || 0, - uptime_seconds: result.stats?.uptime_seconds || 0 - }; - } catch (err: any) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: Get Pending Queue Status - if (pathname === '/api/codexlens/watch/queue' && req.method === 'GET') { - const queryPath = url.searchParams.get('path'); - const targetPath = queryPath || initialPath; - const normalizedPath = normalizePath(targetPath); - const watcher = activeWatchers.get(normalizedPath); - - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - success: true, - queue: watcher?.stats.pending_queue || { file_count: 0, files: [], countdown_seconds: 0, last_event_time: null } - })); - return true; - } - - // API: Flush Pending Queue (Immediate Index) - if (pathname === '/api/codexlens/watch/flush' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { path: watchPath } = body; - const targetPath = watchPath || initialPath; - const normalizedPath = normalizePath(targetPath); - - const watcher = activeWatchers.get(normalizedPath); - if (!watcher) { - return { success: false, error: 'Watcher not running for this path', status: 400 }; - } - - try { - // Create flush.signal file to trigger immediate indexing - const signalDir = path.join(targetPath, '.codexlens'); - const signalFile = path.join(signalDir, 'flush.signal'); - - if (!fs.existsSync(signalDir)) { - fs.mkdirSync(signalDir, { recursive: true }); - } - fs.writeFileSync(signalFile, Date.now().toString()); - - return { success: true, message: 
'Flush signal sent' }; - } catch (err: any) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: Get Index History - if (pathname === '/api/codexlens/watch/history' && req.method === 'GET') { - const queryPath = url.searchParams.get('path'); - const limitParam = url.searchParams.get('limit'); - const limit = limitParam ? parseInt(limitParam, 10) : 10; - const targetPath = queryPath || initialPath; - const normalizedPath = normalizePath(targetPath); - const watcher = activeWatchers.get(normalizedPath); - - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - success: true, - history: watcher?.stats.index_history?.slice(-limit) || [] - })); - return true; - } - - - - // ============================================================ - // SPLADE ENDPOINTS - // ============================================================ - - // API: SPLADE Status - Check if SPLADE is available and installed - if (pathname === '/api/codexlens/splade/status') { - try { - // Check if CodexLens is installed first - const venvStatus = await checkVenvStatus(); - if (!venvStatus.ready) { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - available: false, - installed: false, - model: 'naver/splade-cocondenser-ensembledistil', - error: 'CodexLens not installed' - })); - return true; - } - - // Check SPLADE availability using Python check - const result = await executeCodexLens(['python', '-c', - 'from codexlens.semantic.splade_encoder import check_splade_available; ok, err = check_splade_available(); print("OK" if ok else err)' - ]); - - const available = result.output.includes('OK'); - - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - available, - installed: available, - model: 'naver/splade-cocondenser-ensembledistil', - error: available ? 
null : result.output.trim() - })); - } catch (err) { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - available: false, - installed: false, - model: 'naver/splade-cocondenser-ensembledistil', - error: err.message - })); - } - return true; - } - - // API: SPLADE Install - Install SPLADE dependencies - if (pathname === '/api/codexlens/splade/install' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - try { - const gpu = body?.gpu || false; - const packageName = gpu ? 'codex-lens[splade-gpu]' : 'codex-lens[splade]'; - - // Use pip to install the SPLADE extras - const { spawn } = await import('child_process'); - const { promisify } = await import('util'); - const execFilePromise = promisify(require('child_process').execFile); - - const result = await execFilePromise('pip', ['install', packageName], { - timeout: 600000 // 10 minutes - }); - - return { - success: true, - message: `SPLADE installed successfully (${gpu ? 'GPU' : 'CPU'} mode)`, - output: result.stdout - }; - } catch (err) { - return { - success: false, - error: err.message, - stderr: err.stderr, - status: 500 - }; - } - }); - return true; - } - - // API: SPLADE Index Status - Check if SPLADE index exists for a project - if (pathname === '/api/codexlens/splade/index-status') { - try { - const projectPath = url.searchParams.get('path'); - if (!projectPath) { - res.writeHead(400, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: 'Missing path parameter' })); - return true; - } - - // Check if CodexLens is installed first - const venvStatus = await checkVenvStatus(); - if (!venvStatus.ready) { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ exists: false, error: 'CodexLens not installed' })); - return true; - } - - const { join } = await import('path'); - const indexDb = join(projectPath, '.codexlens', '_index.db'); - - // Use Python to check SPLADE index status 
- const pythonCode = ` -from codexlens.storage.splade_index import SpladeIndex -from pathlib import Path -try: - idx = SpladeIndex(Path("${indexDb.replace(/\\/g, '\\\\')}")) - if idx.has_index(): - stats = idx.get_stats() - meta = idx.get_metadata() - model = meta.get('model_name', '') if meta else '' - print(f"OK|{stats['unique_chunks']}|{stats['total_postings']}|{model}") - else: - print("NO_INDEX") -except Exception as e: - print(f"ERROR|{str(e)}") -`; - - const result = await executeCodexLens(['python', '-c', pythonCode]); - - if (result.output.startsWith('OK|')) { - const parts = result.output.trim().split('|'); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - exists: true, - chunks: parseInt(parts[1]), - postings: parseInt(parts[2]), - model: parts[3] - })); - } else if (result.output.startsWith('ERROR|')) { - const errorMsg = result.output.substring(6).trim(); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ exists: false, error: errorMsg })); - } else { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ exists: false })); - } - } catch (err) { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ exists: false, error: err.message })); - } - return true; - } - - // API: SPLADE Index Rebuild - Rebuild SPLADE index for a project - if (pathname === '/api/codexlens/splade/rebuild' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { path: projectPath } = body; - - if (!projectPath) { - return { success: false, error: 'Missing path parameter', status: 400 }; - } - - try { - // Use 'index splade' instead of deprecated 'splade-index' - const result = await executeCodexLens(['index', 'splade', projectPath, '--rebuild'], { - cwd: projectPath, - timeout: 1800000 // 30 minutes for large codebases - }); - - if (result.success) { - return { - success: true, - message: 'SPLADE index 
rebuilt successfully', - output: result.output - }; - } else { - return { - success: false, - error: result.error || 'Failed to rebuild SPLADE index', - output: result.output, - status: 500 - }; - } - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // ============================================================ - // ENV FILE MANAGEMENT ENDPOINTS - // ============================================================ - - // API: Get global env file content - if (pathname === '/api/codexlens/env' && req.method === 'GET') { - try { - const { homedir } = await import('os'); - const { join } = await import('path'); - const { readFile } = await import('fs/promises'); - - const envPath = join(homedir(), '.codexlens', '.env'); - let content = ''; - try { - content = await readFile(envPath, 'utf-8'); - } catch (e) { - // File doesn't exist, return empty - } - - // Parse env file into key-value pairs (robust parsing) - const envVars: Record = {}; - const lines = content.split('\n'); - for (const line of lines) { - const trimmed = line.trim(); - // Skip empty lines and comments - if (!trimmed || trimmed.startsWith('#')) continue; - - // Find first = that's part of key=value (not in a quote) - const eqIndex = trimmed.indexOf('='); - if (eqIndex <= 0) continue; - - const key = trimmed.substring(0, eqIndex).trim(); - // Validate key format (alphanumeric + underscore) - if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) continue; - - let value = trimmed.substring(eqIndex + 1); - - // Handle quoted values (preserves = inside quotes) - if (value.startsWith('"')) { - // Find matching closing quote (handle escaped quotes) - let end = 1; - while (end < value.length) { - if (value[end] === '"' && value[end - 1] !== '\\') break; - end++; - } - value = value.substring(1, end).replace(/\\"/g, '"'); - } else if (value.startsWith("'")) { - // Single quotes don't support escaping - const end = value.indexOf("'", 1); - value = end > 0 ? 
value.substring(1, end) : value.substring(1); - } else { - // Unquoted: trim and take until comment or end - const commentIndex = value.indexOf(' #'); - if (commentIndex > 0) { - value = value.substring(0, commentIndex); - } - value = value.trim(); - } - - envVars[key] = value; - } - - // Also read settings.json for current configuration - const settingsPath = join(homedir(), '.codexlens', 'settings.json'); - let settings: Record = {}; - try { - const settingsContent = await readFile(settingsPath, 'utf-8'); - settings = JSON.parse(settingsContent); - } catch (e) { - // Settings file doesn't exist or is invalid, use empty - } - - // Map settings to env var format for defaults - const settingsDefaults: Record = {}; - - // Embedding settings - if (settings.embedding?.backend) { - settingsDefaults['CODEXLENS_EMBEDDING_BACKEND'] = settings.embedding.backend; - } - if (settings.embedding?.model) { - settingsDefaults['CODEXLENS_EMBEDDING_MODEL'] = settings.embedding.model; - settingsDefaults['LITELLM_EMBEDDING_MODEL'] = settings.embedding.model; - } - if (settings.embedding?.use_gpu !== undefined) { - settingsDefaults['CODEXLENS_USE_GPU'] = String(settings.embedding.use_gpu); - } - if (settings.embedding?.strategy) { - settingsDefaults['CODEXLENS_EMBEDDING_STRATEGY'] = settings.embedding.strategy; - } - if (settings.embedding?.cooldown !== undefined) { - settingsDefaults['CODEXLENS_EMBEDDING_COOLDOWN'] = String(settings.embedding.cooldown); - } - - // Reranker settings - if (settings.reranker?.backend) { - settingsDefaults['CODEXLENS_RERANKER_BACKEND'] = settings.reranker.backend; - } - if (settings.reranker?.model) { - settingsDefaults['CODEXLENS_RERANKER_MODEL'] = settings.reranker.model; - settingsDefaults['LITELLM_RERANKER_MODEL'] = settings.reranker.model; - } - if (settings.reranker?.enabled !== undefined) { - settingsDefaults['CODEXLENS_RERANKER_ENABLED'] = String(settings.reranker.enabled); - } - if (settings.reranker?.top_k !== undefined) { - 
settingsDefaults['CODEXLENS_RERANKER_TOP_K'] = String(settings.reranker.top_k); - } - - // API/Concurrency settings - if (settings.api?.max_workers !== undefined) { - settingsDefaults['CODEXLENS_API_MAX_WORKERS'] = String(settings.api.max_workers); - } - if (settings.api?.batch_size !== undefined) { - settingsDefaults['CODEXLENS_API_BATCH_SIZE'] = String(settings.api.batch_size); - } - - // Cascade search settings - if (settings.cascade?.strategy) { - settingsDefaults['CODEXLENS_CASCADE_STRATEGY'] = settings.cascade.strategy; - } - if (settings.cascade?.coarse_k !== undefined) { - settingsDefaults['CODEXLENS_CASCADE_COARSE_K'] = String(settings.cascade.coarse_k); - } - if (settings.cascade?.fine_k !== undefined) { - settingsDefaults['CODEXLENS_CASCADE_FINE_K'] = String(settings.cascade.fine_k); - } - - // LLM settings - if (settings.llm?.enabled !== undefined) { - settingsDefaults['CODEXLENS_LLM_ENABLED'] = String(settings.llm.enabled); - } - if (settings.llm?.batch_size !== undefined) { - settingsDefaults['CODEXLENS_LLM_BATCH_SIZE'] = String(settings.llm.batch_size); - } - - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - success: true, - path: envPath, - env: envVars, - raw: content, - settings: settingsDefaults - })); - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - - // API: Save global env file content (merge mode - preserves existing values) - if (pathname === '/api/codexlens/env' && req.method === 'POST') { - handlePostRequest(req, res, async (body) => { - const { env } = body as { env: Record }; - - if (!env || typeof env !== 'object') { - return { success: false, error: 'env object is required', status: 400 }; - } - - try { - const { homedir } = await import('os'); - const { join, dirname } = await import('path'); - const { writeFile, mkdir, readFile } = await import('fs/promises'); - - const 
envPath = join(homedir(), '.codexlens', '.env'); - await mkdir(dirname(envPath), { recursive: true }); - - // Read existing env file to preserve custom variables - let existingEnv: Record = {}; - let existingComments: string[] = []; - try { - const content = await readFile(envPath, 'utf-8'); - const lines = content.split('\n'); - for (const line of lines) { - const trimmed = line.trim(); - // Preserve comment lines that aren't our headers - if (trimmed.startsWith('#') && !trimmed.includes('Managed by CCW')) { - if (!trimmed.includes('Reranker API') && !trimmed.includes('Embedding API') && - !trimmed.includes('LiteLLM Config') && !trimmed.includes('CodexLens Settings') && - !trimmed.includes('Other Settings') && !trimmed.includes('CodexLens Environment')) { - existingComments.push(line); - } - } - if (!trimmed || trimmed.startsWith('#')) continue; - - // Robust parsing (same as GET handler) - const eqIndex = trimmed.indexOf('='); - if (eqIndex <= 0) continue; - - const key = trimmed.substring(0, eqIndex).trim(); - if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) continue; - - let value = trimmed.substring(eqIndex + 1); - if (value.startsWith('"')) { - let end = 1; - while (end < value.length) { - if (value[end] === '"' && value[end - 1] !== '\\') break; - end++; - } - value = value.substring(1, end).replace(/\\"/g, '"'); - } else if (value.startsWith("'")) { - const end = value.indexOf("'", 1); - value = end > 0 ? 
value.substring(1, end) : value.substring(1); - } else { - const commentIndex = value.indexOf(' #'); - if (commentIndex > 0) value = value.substring(0, commentIndex); - value = value.trim(); - } - existingEnv[key] = value; - } - } catch (e) { - // File doesn't exist, start fresh - } - - // Merge: update known keys from payload, preserve unknown keys - const knownKeys = new Set([ - 'RERANKER_API_KEY', 'RERANKER_API_BASE', 'RERANKER_MODEL', - 'EMBEDDING_API_KEY', 'EMBEDDING_API_BASE', 'EMBEDDING_MODEL', - 'LITELLM_API_KEY', 'LITELLM_API_BASE', 'LITELLM_MODEL' - ]); - - // Apply updates from payload - for (const [key, value] of Object.entries(env)) { - if (value) { - existingEnv[key] = value; - } else if (knownKeys.has(key)) { - // Remove known key if value is empty - delete existingEnv[key]; - } - } - - // Build env file content - const lines = [ - '# CodexLens Environment Configuration', - '# Managed by CCW Dashboard', - '' - ]; - - // Add preserved custom comments - if (existingComments.length > 0) { - lines.push(...existingComments, ''); - } - - // Group by prefix - const groups: Record = { - 'RERANKER': [], - 'EMBEDDING': [], - 'LITELLM': [], - 'CODEXLENS': [], - 'OTHER': [] - }; - - for (const [key, value] of Object.entries(existingEnv)) { - if (!value) continue; - // SECURITY: Escape special characters to prevent .env injection - const escapedValue = value - .replace(/\\/g, '\\\\') // Escape backslashes first - .replace(/"/g, '\\"') // Escape double quotes - .replace(/\n/g, '\\n') // Escape newlines - .replace(/\r/g, '\\r'); // Escape carriage returns - const line = `${key}="${escapedValue}"`; - if (key.startsWith('RERANKER_')) groups['RERANKER'].push(line); - else if (key.startsWith('EMBEDDING_')) groups['EMBEDDING'].push(line); - else if (key.startsWith('LITELLM_')) groups['LITELLM'].push(line); - else if (key.startsWith('CODEXLENS_')) groups['CODEXLENS'].push(line); - else groups['OTHER'].push(line); - } - - // Add grouped content - if 
(groups['RERANKER'].length) { - lines.push('# Reranker API Configuration'); - lines.push(...groups['RERANKER'], ''); - } - if (groups['EMBEDDING'].length) { - lines.push('# Embedding API Configuration'); - lines.push(...groups['EMBEDDING'], ''); - } - if (groups['LITELLM'].length) { - lines.push('# LiteLLM Configuration'); - lines.push(...groups['LITELLM'], ''); - } - if (groups['CODEXLENS'].length) { - lines.push('# CodexLens Settings'); - lines.push(...groups['CODEXLENS'], ''); - } - if (groups['OTHER'].length) { - lines.push('# Other Settings'); - lines.push(...groups['OTHER'], ''); - } - - await writeFile(envPath, lines.join('\n'), 'utf-8'); - - // Also update settings.json with mapped values - const settingsPath = join(homedir(), '.codexlens', 'settings.json'); - let settings: Record = {}; - try { - const settingsContent = await readFile(settingsPath, 'utf-8'); - settings = JSON.parse(settingsContent); - } catch (e) { - // File doesn't exist, create default structure - settings = { embedding: {}, reranker: {}, api: {}, cascade: {}, llm: {} }; - } - - // Map env vars to settings.json structure - const envToSettings: Record any }> = { - 'CODEXLENS_EMBEDDING_BACKEND': { path: ['embedding', 'backend'] }, - 'CODEXLENS_EMBEDDING_MODEL': { path: ['embedding', 'model'] }, - 'CODEXLENS_USE_GPU': { path: ['embedding', 'use_gpu'], transform: v => v === 'true' }, - 'CODEXLENS_EMBEDDING_STRATEGY': { path: ['embedding', 'strategy'] }, - 'CODEXLENS_EMBEDDING_COOLDOWN': { path: ['embedding', 'cooldown'], transform: v => parseFloat(v) }, - 'CODEXLENS_RERANKER_BACKEND': { path: ['reranker', 'backend'] }, - 'CODEXLENS_RERANKER_MODEL': { path: ['reranker', 'model'] }, - 'CODEXLENS_RERANKER_ENABLED': { path: ['reranker', 'enabled'], transform: v => v === 'true' }, - 'CODEXLENS_RERANKER_TOP_K': { path: ['reranker', 'top_k'], transform: v => parseInt(v, 10) }, - 'CODEXLENS_API_MAX_WORKERS': { path: ['api', 'max_workers'], transform: v => parseInt(v, 10) }, - 
'CODEXLENS_API_BATCH_SIZE': { path: ['api', 'batch_size'], transform: v => parseInt(v, 10) }, - 'CODEXLENS_CASCADE_STRATEGY': { path: ['cascade', 'strategy'] }, - 'CODEXLENS_CASCADE_COARSE_K': { path: ['cascade', 'coarse_k'], transform: v => parseInt(v, 10) }, - 'CODEXLENS_CASCADE_FINE_K': { path: ['cascade', 'fine_k'], transform: v => parseInt(v, 10) }, - 'CODEXLENS_LLM_ENABLED': { path: ['llm', 'enabled'], transform: v => v === 'true' }, - 'CODEXLENS_LLM_BATCH_SIZE': { path: ['llm', 'batch_size'], transform: v => parseInt(v, 10) }, - 'LITELLM_EMBEDDING_MODEL': { path: ['embedding', 'model'] }, - 'LITELLM_RERANKER_MODEL': { path: ['reranker', 'model'] } - }; - - // Apply env vars to settings - for (const [envKey, value] of Object.entries(env)) { - const mapping = envToSettings[envKey]; - if (mapping && value) { - const [section, key] = mapping.path; - if (!settings[section]) settings[section] = {}; - settings[section][key] = mapping.transform ? mapping.transform(value) : value; - } - } - - // Write updated settings - await writeFile(settingsPath, JSON.stringify(settings, null, 2), 'utf-8'); - - return { - success: true, - message: 'Environment and settings configuration saved', - path: envPath, - settingsPath - }; - } catch (err) { - return { success: false, error: err.message, status: 500 }; - } - }); - return true; - } - - // API: Get workspace index status (FTS and Vector coverage percentages) - if (pathname === '/api/codexlens/workspace-status') { - try { - const projectPath = url.searchParams.get('path') || initialPath; - - // Check if CodexLens is installed first - const venvStatus = await checkVenvStatus(); - if (!venvStatus.ready) { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - success: true, - hasIndex: false, - fts: { indexed: false, percent: 0 }, - vector: { indexed: false, percent: 0 }, - message: 'CodexLens not installed' - })); - return true; - } - - let ftsStatus = { indexed: false, percent: 0, 
totalFiles: 0, indexedFiles: 0 }; - let vectorStatus = { indexed: false, percent: 0, totalFiles: 0, filesWithEmbeddings: 0, totalChunks: 0 }; - let hasIndex = false; - let indexRoot = ''; - - // First, get project info to check if index exists - const projectsResult = await executeCodexLens(['projects', 'show', projectPath, '--json']); - - if (projectsResult.success && projectsResult.output) { - try { - const projectData = extractJSON(projectsResult.output); - if (projectData.success && projectData.result) { - const project = projectData.result; - hasIndex = true; - indexRoot = project.index_root || ''; - - // FTS is always 100% when index exists - ftsStatus = { - indexed: true, - percent: 100, - totalFiles: project.total_files || 0, - indexedFiles: project.total_files || 0 - }; - - // Now get embeddings status for this specific project - const statusResult = await executeCodexLens(['index', 'status', projectPath, '--json']); - if (statusResult.success && statusResult.output) { - try { - const status = extractJSON(statusResult.output); - if (status.success && status.result && status.result.embeddings) { - const embeddings = status.result.embeddings; - - // Find the project-specific embedding info from indexes array - const indexes = embeddings.indexes || []; - let projectEmbedding = null; - - // Look for matching project by path or name - const { basename, resolve } = await import('path'); - const normalizedPath = resolve(projectPath).toLowerCase(); - const projectName = basename(projectPath); - - for (const idx of indexes) { - const idxPath = (idx.path || '').toLowerCase(); - const idxProject = (idx.project || '').toLowerCase(); - if (idxPath.includes(normalizedPath.replace(/\\/g, '/')) || - idxPath.includes(normalizedPath) || - idxProject === projectName.toLowerCase()) { - projectEmbedding = idx; - break; - } - } - - if (projectEmbedding) { - vectorStatus = { - indexed: projectEmbedding.has_embeddings || false, - percent: projectEmbedding.coverage_percent || 0, - 
totalFiles: projectEmbedding.total_files || project.total_files || 0, - filesWithEmbeddings: Math.round((projectEmbedding.coverage_percent || 0) * (projectEmbedding.total_files || 0) / 100), - totalChunks: projectEmbedding.total_chunks || 0 - }; - } else { - // No specific project found, use aggregated stats - vectorStatus = { - indexed: embeddings.indexes_with_embeddings > 0, - percent: 0, - totalFiles: project.total_files || 0, - filesWithEmbeddings: 0, - totalChunks: 0 - }; - } - } - } catch (e) { - console.error('[CodexLens] Failed to parse index status:', e.message); - } - } - } - } catch (e) { - console.error('[CodexLens] Failed to parse project data:', e.message); - } - } - - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ - success: true, - hasIndex, - indexRoot, - path: projectPath, - fts: ftsStatus, - vector: vectorStatus - })); - } catch (err) { - res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ success: false, error: err.message })); - } - return true; - } - + if (await handleCodexLensIndexRoutes(ctx)) return true; + if (await handleCodexLensConfigRoutes(ctx)) return true; + if (await handleCodexLensSemanticRoutes(ctx)) return true; + if (await handleCodexLensWatcherRoutes(ctx)) return true; return false; } + diff --git a/ccw/src/core/routes/codexlens/README.md b/ccw/src/core/routes/codexlens/README.md new file mode 100644 index 00000000..0c2ea75c --- /dev/null +++ b/ccw/src/core/routes/codexlens/README.md @@ -0,0 +1,37 @@ +# CodexLens Routes + +CodexLens-related HTTP endpoints are handled by `ccw/src/core/routes/codexlens-routes.ts`, which delegates to handler modules in this directory. Each handler returns `true` when it handles the current request. + +## File Map + +- `ccw/src/core/routes/codexlens/utils.ts` – shared helpers (ANSI stripping + robust JSON extraction from CLI output). 
+- `ccw/src/core/routes/codexlens/index-handlers.ts` – index/project management endpoints: + - `GET /api/codexlens/indexes` + - `POST /api/codexlens/clean` + - `POST /api/codexlens/init` + - `POST /api/codexlens/cancel` + - `GET /api/codexlens/indexing-status` +- `ccw/src/core/routes/codexlens/config-handlers.ts` – install/config/environment endpoints: + - `GET /api/codexlens/status` + - `GET /api/codexlens/dashboard-init` + - `POST /api/codexlens/bootstrap` + - `POST /api/codexlens/uninstall` + - `GET /api/codexlens/config` + - `POST /api/codexlens/config` + - GPU: `GET /api/codexlens/gpu/detect`, `GET /api/codexlens/gpu/list`, `POST /api/codexlens/gpu/select`, `POST /api/codexlens/gpu/reset` + - Models: `GET /api/codexlens/models`, `POST /api/codexlens/models/download`, `POST /api/codexlens/models/delete`, `GET /api/codexlens/models/info` + - Env: `GET /api/codexlens/env`, `POST /api/codexlens/env` +- `ccw/src/core/routes/codexlens/semantic-handlers.ts` – semantic search + reranker + SPLADE endpoints: + - Semantic: `GET /api/codexlens/semantic/status`, `GET /api/codexlens/semantic/metadata`, `POST /api/codexlens/semantic/install` + - Search: `GET /api/codexlens/search`, `GET /api/codexlens/search_files`, `GET /api/codexlens/symbol`, `POST /api/codexlens/enhance` + - Reranker: `GET /api/codexlens/reranker/config`, `POST /api/codexlens/reranker/config`, `GET /api/codexlens/reranker/models`, `POST /api/codexlens/reranker/models/download`, `POST /api/codexlens/reranker/models/delete`, `GET /api/codexlens/reranker/models/info` + - SPLADE: `GET /api/codexlens/splade/status`, `POST /api/codexlens/splade/install`, `GET /api/codexlens/splade/index-status`, `POST /api/codexlens/splade/rebuild` +- `ccw/src/core/routes/codexlens/watcher-handlers.ts` – file watcher endpoints: + - `GET /api/codexlens/watch/status` + - `POST /api/codexlens/watch/start` + - `POST /api/codexlens/watch/stop` + - Also exports `stopWatcherForUninstall()` used during uninstall flow. 
+ +## Notes + +- CodexLens CLI output may include logging + ANSI escapes even with `--json`; handlers use `extractJSON()` from `utils.ts` to parse reliably. diff --git a/ccw/src/core/routes/codexlens/config-handlers.ts b/ccw/src/core/routes/codexlens/config-handlers.ts new file mode 100644 index 00000000..2c27f34e --- /dev/null +++ b/ccw/src/core/routes/codexlens/config-handlers.ts @@ -0,0 +1,913 @@ +/** + * CodexLens configuration + environment handlers. + */ + +import { + bootstrapVenv, + cancelIndexing, + checkSemanticStatus, + checkVenvStatus, + detectGpuSupport, + executeCodexLens, + isIndexingInProgress, + uninstallCodexLens, +} from '../../../tools/codex-lens.js'; +import type { RouteContext } from '../types.js'; +import { EXEC_TIMEOUTS } from '../../../utils/exec-constants.js'; +import { extractJSON } from './utils.js'; +import { stopWatcherForUninstall } from './watcher-handlers.js'; + +export async function handleCodexLensConfigRoutes(ctx: RouteContext): Promise { + const { pathname, url, req, res, initialPath, handlePostRequest, broadcastToClients } = ctx; + + // API: CodexLens Status + if (pathname === '/api/codexlens/status') { + const status = await checkVenvStatus(); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(status)); + return true; + } + + // API: CodexLens Dashboard Init - Aggregated endpoint for page initialization + if (pathname === '/api/codexlens/dashboard-init') { + try { + const venvStatus = await checkVenvStatus(); + + if (!venvStatus.ready) { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ + installed: false, + status: venvStatus, + config: { index_dir: '~/.codexlens/indexes', index_count: 0 }, + semantic: { available: false } + })); + return true; + } + + // Parallel fetch all initialization data + const [configResult, statusResult, semanticStatus] = await Promise.all([ + executeCodexLens(['config', '--json']), + executeCodexLens(['status', '--json']), + 
checkSemanticStatus() + ]); + + // Parse config + let config = { index_dir: '~/.codexlens/indexes', index_count: 0 }; + if (configResult.success) { + try { + const configData = extractJSON(configResult.output ?? ''); + if (configData.success && configData.result) { + config.index_dir = configData.result.index_dir || configData.result.index_root || config.index_dir; + } + } catch (e: unknown) { + console.error('[CodexLens] Failed to parse config for dashboard init:', e instanceof Error ? e.message : String(e)); + } + } + + // Parse status + let statusData: any = {}; + if (statusResult.success) { + try { + const status = extractJSON(statusResult.output ?? ''); + if (status.success && status.result) { + config.index_count = status.result.projects_count || 0; + statusData = status.result; + } + } catch (e: unknown) { + console.error('[CodexLens] Failed to parse status for dashboard init:', e instanceof Error ? e.message : String(e)); + } + } + + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ + installed: true, + status: venvStatus, + config, + semantic: semanticStatus, + statusData + })); + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? 
err.message : String(err) })); + } + return true; + } + + // API: CodexLens Bootstrap (Install) + if (pathname === '/api/codexlens/bootstrap' && req.method === 'POST') { + handlePostRequest(req, res, async () => { + try { + const result = await bootstrapVenv(); + if (result.success) { + const status = await checkVenvStatus(); + broadcastToClients({ + type: 'CODEXLENS_INSTALLED', + payload: { version: status.version, timestamp: new Date().toISOString() } + }); + return { success: true, message: 'CodexLens installed successfully', version: status.version }; + } else { + return { success: false, error: result.error, status: 500 }; + } + } catch (err) { + return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 }; + } + }); + return true; + } + + // API: CodexLens Uninstall + if (pathname === '/api/codexlens/uninstall' && req.method === 'POST') { + handlePostRequest(req, res, async () => { + try { + // Stop watcher if running (to release file handles) + await stopWatcherForUninstall(); + + if (isIndexingInProgress()) { + console.log('[CodexLens] Cancelling indexing before uninstall...'); + try { + cancelIndexing(); + } catch { + // Ignore errors + } + } + + // Wait a moment for processes to fully exit and release handles + await new Promise(resolve => setTimeout(resolve, 1000)); + + const result = await uninstallCodexLens(); + if (result.success) { + broadcastToClients({ + type: 'CODEXLENS_UNINSTALLED', + payload: { timestamp: new Date().toISOString() } + }); + return { success: true, message: 'CodexLens uninstalled successfully' }; + } else { + return { success: false, error: result.error, status: 500 }; + } + } catch (err) { + return { success: false, error: err instanceof Error ? 
err.message : String(err), status: 500 }; + } + }); + return true; + } + + // API: CodexLens Config - GET (Get current configuration with index count) + if (pathname === '/api/codexlens/config' && req.method === 'GET') { + try { + const venvStatus = await checkVenvStatus(); + let responseData = { index_dir: '~/.codexlens/indexes', index_count: 0, api_max_workers: 4, api_batch_size: 8 }; + + // If not installed, return default config without executing CodexLens + if (!venvStatus.ready) { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(responseData)); + return true; + } + + const [configResult, statusResult] = await Promise.all([ + executeCodexLens(['config', '--json']), + executeCodexLens(['status', '--json']) + ]); + + // Parse config (extract JSON from output that may contain log messages) + if (configResult.success) { + try { + const config = extractJSON(configResult.output ?? ''); + if (config.success && config.result) { + // CLI returns index_dir (not index_root) + responseData.index_dir = config.result.index_dir || config.result.index_root || responseData.index_dir; + // Extract API settings + if (config.result.api_max_workers !== undefined) { + responseData.api_max_workers = config.result.api_max_workers; + } + if (config.result.api_batch_size !== undefined) { + responseData.api_batch_size = config.result.api_batch_size; + } + } + } catch (e: unknown) { + console.error('[CodexLens] Failed to parse config:', e instanceof Error ? e.message : String(e)); + console.error('[CodexLens] Config output:', (configResult.output ?? '').substring(0, 200)); + } + } + + // Parse status to get index_count (projects_count) + if (statusResult.success) { + try { + const status = extractJSON(statusResult.output ?? ''); + if (status.success && status.result) { + responseData.index_count = status.result.projects_count || 0; + } + } catch (e: unknown) { + console.error('[CodexLens] Failed to parse status:', e instanceof Error ? 
e.message : String(e)); + console.error('[CodexLens] Status output:', (statusResult.output ?? '').substring(0, 200)); + } + } + + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(responseData)); + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err instanceof Error ? err.message : String(err) })); + } + return true; + } + + // API: CodexLens Config - POST (Set configuration) + if (pathname === '/api/codexlens/config' && req.method === 'POST') { + handlePostRequest(req, res, async (body: unknown) => { + const { index_dir, api_max_workers, api_batch_size } = body as { + index_dir?: unknown; + api_max_workers?: unknown; + api_batch_size?: unknown; + }; + + if (!index_dir) { + return { success: false, error: 'index_dir is required', status: 400 }; + } + + // Validate index_dir path + const indexDirStr = String(index_dir).trim(); + + // Check for dangerous patterns + if (indexDirStr.includes('\0')) { + return { success: false, error: 'Invalid path: contains null bytes', status: 400 }; + } + + // Prevent system root paths and their subdirectories (Windows and Unix) + const dangerousPaths = ['/', 'C:\\', 'C:/', '/etc', '/usr', '/bin', '/sys', '/proc', '/var', + 'C:\\Windows', 'C:\\Program Files', 'C:\\Program Files (x86)', 'C:\\System32']; + const normalizedPath = indexDirStr.replace(/\\/g, '/').toLowerCase(); + for (const dangerous of dangerousPaths) { + const dangerousLower = dangerous.replace(/\\/g, '/').toLowerCase(); + // Block exact match OR any subdirectory (using startsWith) + if (normalizedPath === dangerousLower || + normalizedPath === dangerousLower + '/' || + normalizedPath.startsWith(dangerousLower + '/')) { + return { success: false, error: 'Invalid path: cannot use system directories or their subdirectories', status: 400 }; + } + } + + // Additional check: prevent path traversal attempts + if (normalizedPath.includes('../') || 
normalizedPath.includes('/..')) { + return { success: false, error: 'Invalid path: path traversal not allowed', status: 400 }; + } + + // Validate api settings + if (api_max_workers !== undefined) { + const workers = Number(api_max_workers); + if (isNaN(workers) || workers < 1 || workers > 32) { + return { success: false, error: 'api_max_workers must be between 1 and 32', status: 400 }; + } + } + if (api_batch_size !== undefined) { + const batch = Number(api_batch_size); + if (isNaN(batch) || batch < 1 || batch > 64) { + return { success: false, error: 'api_batch_size must be between 1 and 64', status: 400 }; + } + } + + try { + // Set index_dir + const result = await executeCodexLens(['config', 'set', 'index_dir', indexDirStr, '--json']); + if (!result.success) { + return { success: false, error: result.error || 'Failed to update index_dir', status: 500 }; + } + + // Set API settings if provided + if (api_max_workers !== undefined) { + await executeCodexLens(['config', 'set', 'api_max_workers', String(api_max_workers), '--json']); + } + if (api_batch_size !== undefined) { + await executeCodexLens(['config', 'set', 'api_batch_size', String(api_batch_size), '--json']); + } + + return { success: true, message: 'Configuration updated successfully' }; + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 }; + } + }); + return true; + } + + // API: Detect GPU support for semantic search + if (pathname === '/api/codexlens/gpu/detect' && req.method === 'GET') { + try { + const gpuInfo = await detectGpuSupport(); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: true, ...gpuInfo })); + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? 
err.message : String(err) })); + } + return true; + } + + // API: List available GPU devices for selection + if (pathname === '/api/codexlens/gpu/list' && req.method === 'GET') { + try { + // Try CodexLens gpu-list first if available + const venvStatus = await checkVenvStatus(); + if (venvStatus.ready) { + const result = await executeCodexLens(['gpu-list', '--json']); + if (result.success) { + try { + const parsed = extractJSON(result.output ?? ''); + if (parsed.devices && parsed.devices.length > 0) { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(parsed)); + return true; + } + } catch { + // Fall through to system detection + } + } + } + + // Fallback: Use system commands to detect GPUs + const devices: Array<{ name: string; type: string; index: number }> = []; + + if (process.platform === 'win32') { + // Windows: Use WMIC to get GPU info + try { + const { execSync } = await import('child_process'); + const wmicOutput = execSync('wmic path win32_VideoController get name', { + encoding: 'utf-8', + timeout: EXEC_TIMEOUTS.SYSTEM_INFO, + stdio: ['pipe', 'pipe', 'pipe'] + }); + + const lines = wmicOutput.split('\n') + .map(line => line.trim()) + .filter(line => line && line !== 'Name'); + + lines.forEach((name, index) => { + if (name) { + const isIntegrated = name.toLowerCase().includes('intel') || + name.toLowerCase().includes('integrated'); + devices.push({ + name: name, + type: isIntegrated ? 
'integrated' : 'discrete', + index: index + }); + } + }); + } catch (e) { + console.warn('[CodexLens] WMIC GPU detection failed:', (e as Error).message); + } + } else { + // Linux/Mac: Try nvidia-smi for NVIDIA GPUs + try { + const { execSync } = await import('child_process'); + const nvidiaOutput = execSync('nvidia-smi --query-gpu=name --format=csv,noheader', { + encoding: 'utf-8', + timeout: EXEC_TIMEOUTS.SYSTEM_INFO, + stdio: ['pipe', 'pipe', 'pipe'] + }); + + const lines = nvidiaOutput.split('\n').filter(line => line.trim()); + lines.forEach((name, index) => { + devices.push({ + name: name.trim(), + type: 'discrete', + index: index + }); + }); + } catch { + // NVIDIA not available, that's fine + } + } + + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: true, devices: devices, selected_device_id: null })); + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) })); + } + return true; + } + + // API: Select GPU device for embedding + if (pathname === '/api/codexlens/gpu/select' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + const { device_id } = body as { device_id?: unknown }; + const resolvedDeviceId = typeof device_id === 'string' || typeof device_id === 'number' ? device_id : undefined; + + if (resolvedDeviceId === undefined) { + return { success: false, error: 'device_id is required', status: 400 }; + } + + try { + const result = await executeCodexLens(['gpu-select', String(resolvedDeviceId), '--json']); + if (result.success) { + try { + const parsed = extractJSON(result.output ?? 
''); + return parsed; + } catch { + return { success: true, message: 'GPU selected', output: result.output }; + } + } else { + return { success: false, error: result.error, status: 500 }; + } + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 }; + } + }); + return true; + } + + // API: Reset GPU selection to auto-detection + if (pathname === '/api/codexlens/gpu/reset' && req.method === 'POST') { + handlePostRequest(req, res, async () => { + try { + const result = await executeCodexLens(['gpu-reset', '--json']); + if (result.success) { + try { + const parsed = extractJSON(result.output ?? ''); + return parsed; + } catch { + return { success: true, message: 'GPU selection reset', output: result.output }; + } + } else { + return { success: false, error: result.error, status: 500 }; + } + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 }; + } + }); + return true; + } + + // API: CodexLens Model List (list available embedding models) + if (pathname === '/api/codexlens/models' && req.method === 'GET') { + try { + // Check if CodexLens is installed first (without auto-installing) + const venvStatus = await checkVenvStatus(); + if (!venvStatus.ready) { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: 'CodexLens not installed' })); + return true; + } + const result = await executeCodexLens(['model-list', '--json']); + if (result.success) { + try { + const parsed = extractJSON(result.output ?? 
''); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(parsed)); + } catch { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: true, result: { models: [] }, output: result.output })); + } + } else { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: result.error })); + } + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) })); + } + return true; + } + + // API: CodexLens Model Download (download embedding model by profile) + if (pathname === '/api/codexlens/models/download' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + const { profile } = body as { profile?: unknown }; + const resolvedProfile = typeof profile === 'string' && profile.trim().length > 0 ? profile.trim() : undefined; + + if (!resolvedProfile) { + return { success: false, error: 'profile is required', status: 400 }; + } + + try { + const result = await executeCodexLens(['model-download', resolvedProfile, '--json'], { timeout: 600000 }); // 10 min for download + if (result.success) { + try { + const parsed = extractJSON(result.output ?? ''); + return { success: true, ...parsed }; + } catch { + return { success: true, output: result.output }; + } + } else { + return { success: false, error: result.error, status: 500 }; + } + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? 
err.message : String(err), status: 500 }; + } + }); + return true; + } + + // API: CodexLens Model Delete (delete embedding model by profile) + if (pathname === '/api/codexlens/models/delete' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + const { profile } = body as { profile?: unknown }; + const resolvedProfile = typeof profile === 'string' && profile.trim().length > 0 ? profile.trim() : undefined; + + if (!resolvedProfile) { + return { success: false, error: 'profile is required', status: 400 }; + } + + try { + const result = await executeCodexLens(['model-delete', resolvedProfile, '--json']); + if (result.success) { + try { + const parsed = extractJSON(result.output ?? ''); + return { success: true, ...parsed }; + } catch { + return { success: true, output: result.output }; + } + } else { + return { success: false, error: result.error, status: 500 }; + } + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 }; + } + }); + return true; + } + + // API: CodexLens Model Info (get model info by profile) + if (pathname === '/api/codexlens/models/info' && req.method === 'GET') { + const profile = url.searchParams.get('profile'); + + if (!profile) { + res.writeHead(400, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: 'profile parameter is required' })); + return true; + } + + try { + const result = await executeCodexLens(['model-info', profile, '--json']); + if (result.success) { + try { + const parsed = extractJSON(result.output ?? 
''); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(parsed)); + } catch { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: 'Failed to parse response' })); + } + } else { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: result.error })); + } + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) })); + } + return true; + } + + // ============================================================ + // ENV FILE MANAGEMENT ENDPOINTS + // ============================================================ + + // API: Get global env file content + if (pathname === '/api/codexlens/env' && req.method === 'GET') { + try { + const { homedir } = await import('os'); + const { join } = await import('path'); + const { readFile } = await import('fs/promises'); + + const envPath = join(homedir(), '.codexlens', '.env'); + let content = ''; + try { + content = await readFile(envPath, 'utf-8'); + } catch { + // File doesn't exist, return empty + } + + // Parse env file into key-value pairs (robust parsing) + const envVars: Record = {}; + const lines = content.split('\n'); + for (const line of lines) { + const trimmed = line.trim(); + // Skip empty lines and comments + if (!trimmed || trimmed.startsWith('#')) continue; + + // Find first = that's part of key=value (not in a quote) + const eqIndex = trimmed.indexOf('='); + if (eqIndex <= 0) continue; + + const key = trimmed.substring(0, eqIndex).trim(); + // Validate key format (alphanumeric + underscore) + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) continue; + + let value = trimmed.substring(eqIndex + 1); + + // Handle quoted values (preserves = inside quotes) + if (value.startsWith('"')) { + // Find matching closing quote (handle escaped quotes) + let 
end = 1; + while (end < value.length) { + if (value[end] === '"' && value[end - 1] !== '\\') break; + end++; + } + value = value.substring(1, end).replace(/\\"/g, '"'); + } else if (value.startsWith("'")) { + // Single quotes don't support escaping + const end = value.indexOf("'", 1); + value = end > 0 ? value.substring(1, end) : value.substring(1); + } else { + // Unquoted: trim and take until comment or end + const commentIndex = value.indexOf(' #'); + if (commentIndex > 0) { + value = value.substring(0, commentIndex); + } + value = value.trim(); + } + + envVars[key] = value; + } + + // Also read settings.json for current configuration + const settingsPath = join(homedir(), '.codexlens', 'settings.json'); + let settings: Record = {}; + try { + const settingsContent = await readFile(settingsPath, 'utf-8'); + settings = JSON.parse(settingsContent); + } catch { + // Settings file doesn't exist or is invalid, use empty + } + + // Map settings to env var format for defaults + const settingsDefaults: Record = {}; + + // Embedding settings + if (settings.embedding?.backend) { + settingsDefaults['CODEXLENS_EMBEDDING_BACKEND'] = settings.embedding.backend; + } + if (settings.embedding?.model) { + settingsDefaults['CODEXLENS_EMBEDDING_MODEL'] = settings.embedding.model; + settingsDefaults['LITELLM_EMBEDDING_MODEL'] = settings.embedding.model; + } + if (settings.embedding?.use_gpu !== undefined) { + settingsDefaults['CODEXLENS_USE_GPU'] = String(settings.embedding.use_gpu); + } + if (settings.embedding?.strategy) { + settingsDefaults['CODEXLENS_EMBEDDING_STRATEGY'] = settings.embedding.strategy; + } + if (settings.embedding?.cooldown !== undefined) { + settingsDefaults['CODEXLENS_EMBEDDING_COOLDOWN'] = String(settings.embedding.cooldown); + } + + // Reranker settings + if (settings.reranker?.backend) { + settingsDefaults['CODEXLENS_RERANKER_BACKEND'] = settings.reranker.backend; + } + if (settings.reranker?.model) { + settingsDefaults['CODEXLENS_RERANKER_MODEL'] = 
settings.reranker.model; + settingsDefaults['LITELLM_RERANKER_MODEL'] = settings.reranker.model; + } + if (settings.reranker?.enabled !== undefined) { + settingsDefaults['CODEXLENS_RERANKER_ENABLED'] = String(settings.reranker.enabled); + } + if (settings.reranker?.top_k !== undefined) { + settingsDefaults['CODEXLENS_RERANKER_TOP_K'] = String(settings.reranker.top_k); + } + + // API/Concurrency settings + if (settings.api?.max_workers !== undefined) { + settingsDefaults['CODEXLENS_API_MAX_WORKERS'] = String(settings.api.max_workers); + } + if (settings.api?.batch_size !== undefined) { + settingsDefaults['CODEXLENS_API_BATCH_SIZE'] = String(settings.api.batch_size); + } + + // Cascade search settings + if (settings.cascade?.strategy) { + settingsDefaults['CODEXLENS_CASCADE_STRATEGY'] = settings.cascade.strategy; + } + if (settings.cascade?.coarse_k !== undefined) { + settingsDefaults['CODEXLENS_CASCADE_COARSE_K'] = String(settings.cascade.coarse_k); + } + if (settings.cascade?.fine_k !== undefined) { + settingsDefaults['CODEXLENS_CASCADE_FINE_K'] = String(settings.cascade.fine_k); + } + + // LLM settings + if (settings.llm?.enabled !== undefined) { + settingsDefaults['CODEXLENS_LLM_ENABLED'] = String(settings.llm.enabled); + } + if (settings.llm?.batch_size !== undefined) { + settingsDefaults['CODEXLENS_LLM_BATCH_SIZE'] = String(settings.llm.batch_size); + } + + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ + success: true, + path: envPath, + env: envVars, + raw: content, + settings: settingsDefaults + })); + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? 
err.message : String(err) })); + } + return true; + } + + // API: Save global env file content (merge mode - preserves existing values) + if (pathname === '/api/codexlens/env' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + const { env } = body as { env: Record }; + + if (!env || typeof env !== 'object') { + return { success: false, error: 'env object is required', status: 400 }; + } + + try { + const { homedir } = await import('os'); + const { join, dirname } = await import('path'); + const { writeFile, mkdir, readFile } = await import('fs/promises'); + + const envPath = join(homedir(), '.codexlens', '.env'); + await mkdir(dirname(envPath), { recursive: true }); + + // Read existing env file to preserve custom variables + let existingEnv: Record = {}; + let existingComments: string[] = []; + try { + const content = await readFile(envPath, 'utf-8'); + const lines = content.split('\n'); + for (const line of lines) { + const trimmed = line.trim(); + // Preserve comment lines that aren't our headers + if (trimmed.startsWith('#') && !trimmed.includes('Managed by CCW')) { + if (!trimmed.includes('Reranker API') && !trimmed.includes('Embedding API') && + !trimmed.includes('LiteLLM Config') && !trimmed.includes('CodexLens Settings') && + !trimmed.includes('Other Settings') && !trimmed.includes('CodexLens Environment')) { + existingComments.push(line); + } + } + if (!trimmed || trimmed.startsWith('#')) continue; + + // Robust parsing (same as GET handler) + const eqIndex = trimmed.indexOf('='); + if (eqIndex <= 0) continue; + + const key = trimmed.substring(0, eqIndex).trim(); + if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) continue; + + let value = trimmed.substring(eqIndex + 1); + if (value.startsWith('"')) { + let end = 1; + while (end < value.length) { + if (value[end] === '"' && value[end - 1] !== '\\') break; + end++; + } + value = value.substring(1, end).replace(/\\"/g, '"'); + } else if (value.startsWith("'")) { + const end = 
value.indexOf("'", 1); + value = end > 0 ? value.substring(1, end) : value.substring(1); + } else { + const commentIndex = value.indexOf(' #'); + if (commentIndex > 0) value = value.substring(0, commentIndex); + value = value.trim(); + } + existingEnv[key] = value; + } + } catch { + // File doesn't exist, start fresh + } + + // Merge: update known keys from payload, preserve unknown keys + const knownKeys = new Set([ + 'RERANKER_API_KEY', 'RERANKER_API_BASE', 'RERANKER_MODEL', + 'EMBEDDING_API_KEY', 'EMBEDDING_API_BASE', 'EMBEDDING_MODEL', + 'LITELLM_API_KEY', 'LITELLM_API_BASE', 'LITELLM_MODEL' + ]); + + // Apply updates from payload + for (const [key, value] of Object.entries(env)) { + if (value) { + existingEnv[key] = value; + } else if (knownKeys.has(key)) { + // Remove known key if value is empty + delete existingEnv[key]; + } + } + + // Build env file content + const lines = [ + '# CodexLens Environment Configuration', + '# Managed by CCW Dashboard', + '' + ]; + + // Add preserved custom comments + if (existingComments.length > 0) { + lines.push(...existingComments, ''); + } + + // Group by prefix + const groups: Record = { + 'RERANKER': [], + 'EMBEDDING': [], + 'LITELLM': [], + 'CODEXLENS': [], + 'OTHER': [] + }; + + for (const [key, value] of Object.entries(existingEnv)) { + if (!value) continue; + // SECURITY: Escape special characters to prevent .env injection + const escapedValue = value + .replace(/\\/g, '\\\\') // Escape backslashes first + .replace(/"/g, '\\"') // Escape double quotes + .replace(/\n/g, '\\n') // Escape newlines + .replace(/\r/g, '\\r'); // Escape carriage returns + const line = `${key}="${escapedValue}"`; + if (key.startsWith('RERANKER_')) groups['RERANKER'].push(line); + else if (key.startsWith('EMBEDDING_')) groups['EMBEDDING'].push(line); + else if (key.startsWith('LITELLM_')) groups['LITELLM'].push(line); + else if (key.startsWith('CODEXLENS_')) groups['CODEXLENS'].push(line); + else groups['OTHER'].push(line); + } + + // Add 
grouped content + if (groups['RERANKER'].length) { + lines.push('# Reranker API Configuration'); + lines.push(...groups['RERANKER'], ''); + } + if (groups['EMBEDDING'].length) { + lines.push('# Embedding API Configuration'); + lines.push(...groups['EMBEDDING'], ''); + } + if (groups['LITELLM'].length) { + lines.push('# LiteLLM Configuration'); + lines.push(...groups['LITELLM'], ''); + } + if (groups['CODEXLENS'].length) { + lines.push('# CodexLens Settings'); + lines.push(...groups['CODEXLENS'], ''); + } + if (groups['OTHER'].length) { + lines.push('# Other Settings'); + lines.push(...groups['OTHER'], ''); + } + + await writeFile(envPath, lines.join('\n'), 'utf-8'); + + // Also update settings.json with mapped values + const settingsPath = join(homedir(), '.codexlens', 'settings.json'); + let settings: Record = {}; + try { + const settingsContent = await readFile(settingsPath, 'utf-8'); + settings = JSON.parse(settingsContent); + } catch { + // File doesn't exist, create default structure + settings = { embedding: {}, reranker: {}, api: {}, cascade: {}, llm: {} }; + } + + // Map env vars to settings.json structure + const envToSettings: Record any }> = { + 'CODEXLENS_EMBEDDING_BACKEND': { path: ['embedding', 'backend'] }, + 'CODEXLENS_EMBEDDING_MODEL': { path: ['embedding', 'model'] }, + 'CODEXLENS_USE_GPU': { path: ['embedding', 'use_gpu'], transform: v => v === 'true' }, + 'CODEXLENS_EMBEDDING_STRATEGY': { path: ['embedding', 'strategy'] }, + 'CODEXLENS_EMBEDDING_COOLDOWN': { path: ['embedding', 'cooldown'], transform: v => parseFloat(v) }, + 'CODEXLENS_RERANKER_BACKEND': { path: ['reranker', 'backend'] }, + 'CODEXLENS_RERANKER_MODEL': { path: ['reranker', 'model'] }, + 'CODEXLENS_RERANKER_ENABLED': { path: ['reranker', 'enabled'], transform: v => v === 'true' }, + 'CODEXLENS_RERANKER_TOP_K': { path: ['reranker', 'top_k'], transform: v => parseInt(v, 10) }, + 'CODEXLENS_API_MAX_WORKERS': { path: ['api', 'max_workers'], transform: v => parseInt(v, 10) }, + 
'CODEXLENS_API_BATCH_SIZE': { path: ['api', 'batch_size'], transform: v => parseInt(v, 10) }, + 'CODEXLENS_CASCADE_STRATEGY': { path: ['cascade', 'strategy'] }, + 'CODEXLENS_CASCADE_COARSE_K': { path: ['cascade', 'coarse_k'], transform: v => parseInt(v, 10) }, + 'CODEXLENS_CASCADE_FINE_K': { path: ['cascade', 'fine_k'], transform: v => parseInt(v, 10) }, + 'CODEXLENS_LLM_ENABLED': { path: ['llm', 'enabled'], transform: v => v === 'true' }, + 'CODEXLENS_LLM_BATCH_SIZE': { path: ['llm', 'batch_size'], transform: v => parseInt(v, 10) }, + 'LITELLM_EMBEDDING_MODEL': { path: ['embedding', 'model'] }, + 'LITELLM_RERANKER_MODEL': { path: ['reranker', 'model'] } + }; + + // Apply env vars to settings + for (const [envKey, value] of Object.entries(env)) { + const mapping = envToSettings[envKey]; + if (mapping && value) { + const [section, key] = mapping.path; + if (!settings[section]) settings[section] = {}; + settings[section][key] = mapping.transform ? mapping.transform(value) : value; + } + } + + // Write updated settings + await writeFile(settingsPath, JSON.stringify(settings, null, 2), 'utf-8'); + + return { + success: true, + message: 'Environment and settings configuration saved', + path: envPath, + settingsPath + }; + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 }; + } + }); + return true; + } + + return false; +} diff --git a/ccw/src/core/routes/codexlens/index-handlers.ts b/ccw/src/core/routes/codexlens/index-handlers.ts new file mode 100644 index 00000000..36e5b341 --- /dev/null +++ b/ccw/src/core/routes/codexlens/index-handlers.ts @@ -0,0 +1,335 @@ +/** + * CodexLens index management handlers. 
+ */ + +import { + cancelIndexing, + checkVenvStatus, + ensureLiteLLMEmbedderReady, + executeCodexLens, + isIndexingInProgress, +} from '../../../tools/codex-lens.js'; +import type { ProgressInfo } from '../../../tools/codex-lens.js'; +import type { RouteContext } from '../types.js'; +import { extractJSON, formatSize } from './utils.js'; + +/** + * Handle CodexLens index routes + * @returns true if route was handled, false otherwise + */ +export async function handleCodexLensIndexRoutes(ctx: RouteContext): Promise { + const { pathname, url, req, res, initialPath, handlePostRequest, broadcastToClients } = ctx; + + // API: CodexLens Index List - Get all indexed projects with details + if (pathname === '/api/codexlens/indexes') { + try { + // Check if CodexLens is installed first (without auto-installing) + const venvStatus = await checkVenvStatus(); + if (!venvStatus.ready) { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: true, indexes: [], totalSize: 0, totalSizeFormatted: '0 B' })); + return true; + } + + // Execute all CLI commands in parallel + const [configResult, projectsResult, statusResult] = await Promise.all([ + executeCodexLens(['config', '--json']), + executeCodexLens(['projects', 'list', '--json']), + executeCodexLens(['status', '--json']) + ]); + + let indexDir = ''; + if (configResult.success) { + try { + const config = extractJSON(configResult.output ?? ''); + if (config.success && config.result) { + // CLI returns index_dir (not index_root) + indexDir = config.result.index_dir || config.result.index_root || ''; + } + } catch (e: unknown) { + console.error('[CodexLens] Failed to parse config for index list:', e instanceof Error ? e.message : String(e)); + } + } + + let indexes: any[] = []; + let totalSize = 0; + let vectorIndexCount = 0; + let normalIndexCount = 0; + + if (projectsResult.success) { + try { + const projectsData = extractJSON(projectsResult.output ?? 
''); + if (projectsData.success && Array.isArray(projectsData.result)) { + const { stat, readdir } = await import('fs/promises'); + const { existsSync } = await import('fs'); + const { basename, join } = await import('path'); + + for (const project of projectsData.result) { + // Skip test/temp projects + if (project.source_root && ( + project.source_root.includes('\\Temp\\') || + project.source_root.includes('/tmp/') || + project.total_files === 0 + )) { + continue; + } + + let projectSize = 0; + let hasVectorIndex = false; + let hasNormalIndex = true; // All projects have FTS index + let lastModified = null; + + // Try to get actual index size from index_root + if (project.index_root && existsSync(project.index_root)) { + try { + const files = await readdir(project.index_root); + for (const file of files) { + try { + const filePath = join(project.index_root, file); + const fileStat = await stat(filePath); + projectSize += fileStat.size; + if (!lastModified || fileStat.mtime > lastModified) { + lastModified = fileStat.mtime; + } + // Check for vector/embedding files + if (file.includes('vector') || file.includes('embedding') || + file.endsWith('.faiss') || file.endsWith('.npy') || + file.includes('semantic_chunks')) { + hasVectorIndex = true; + } + } catch { + // Skip files we can't stat + } + } + } catch { + // Can't read index directory + } + } + + if (hasVectorIndex) vectorIndexCount++; + if (hasNormalIndex) normalIndexCount++; + totalSize += projectSize; + + // Use source_root as the display name + const displayName = project.source_root ? basename(project.source_root) : `project_${project.id}`; + + indexes.push({ + id: displayName, + path: project.source_root || '', + indexPath: project.index_root || '', + size: projectSize, + sizeFormatted: formatSize(projectSize), + fileCount: project.total_files || 0, + dirCount: project.total_dirs || 0, + hasVectorIndex, + hasNormalIndex, + status: project.status || 'active', + lastModified: lastModified ? 
lastModified.toISOString() : null + }); + } + + // Sort by file count (most files first), then by name + indexes.sort((a, b) => { + if (b.fileCount !== a.fileCount) return b.fileCount - a.fileCount; + return a.id.localeCompare(b.id); + }); + } + } catch (e: unknown) { + console.error('[CodexLens] Failed to parse projects list:', e instanceof Error ? e.message : String(e)); + } + } + + // Parse summary stats from status command (already fetched in parallel) + let statusSummary: any = {}; + + if (statusResult.success) { + try { + const status = extractJSON(statusResult.output ?? ''); + if (status.success && status.result) { + statusSummary = { + totalProjects: status.result.projects_count || indexes.length, + totalFiles: status.result.total_files || 0, + totalDirs: status.result.total_dirs || 0, + // Keep calculated totalSize for consistency with per-project sizes + // status.index_size_bytes includes shared resources (models, cache) + indexSizeBytes: totalSize, + indexSizeMb: totalSize / (1024 * 1024), + embeddings: status.result.embeddings || {}, + // Store full index dir size separately for reference + fullIndexDirSize: status.result.index_size_bytes || 0, + fullIndexDirSizeFormatted: formatSize(status.result.index_size_bytes || 0) + }; + } + } catch (e: unknown) { + console.error('[CodexLens] Failed to parse status:', e instanceof Error ? e.message : String(e)); + } + } + + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ + success: true, + indexDir, + indexes, + summary: { + totalProjects: indexes.length, + totalSize, + totalSizeFormatted: formatSize(totalSize), + vectorIndexCount, + normalIndexCount, + ...statusSummary + } + })); + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? 
err.message : String(err) })); + } + return true; + } + + // API: CodexLens Clean (Clean indexes) + if (pathname === '/api/codexlens/clean' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + const { all = false, path } = body as { all?: unknown; path?: unknown }; + + try { + const args = ['clean']; + if (all === true) { + args.push('--all'); + } else if (typeof path === 'string' && path.trim().length > 0) { + // Path is passed as a positional argument, not as a flag + args.push(path); + } + args.push('--json'); + + const result = await executeCodexLens(args); + if (result.success) { + return { success: true, message: 'Indexes cleaned successfully' }; + } else { + return { success: false, error: result.error || 'Failed to clean indexes', status: 500 }; + } + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 }; + } + }); + return true; + } + + // API: CodexLens Init (Initialize workspace index) + if (pathname === '/api/codexlens/init' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + const { path: projectPath, indexType = 'vector', embeddingModel = 'code', embeddingBackend = 'fastembed', maxWorkers = 1 } = body as { + path?: unknown; + indexType?: unknown; + embeddingModel?: unknown; + embeddingBackend?: unknown; + maxWorkers?: unknown; + }; + const targetPath = typeof projectPath === 'string' && projectPath.trim().length > 0 ? projectPath : initialPath; + const resolvedIndexType = indexType === 'normal' ? 'normal' : 'vector'; + const resolvedEmbeddingModel = typeof embeddingModel === 'string' && embeddingModel.trim().length > 0 ? embeddingModel : 'code'; + const resolvedEmbeddingBackend = typeof embeddingBackend === 'string' && embeddingBackend.trim().length > 0 ? embeddingBackend : 'fastembed'; + const resolvedMaxWorkers = typeof maxWorkers === 'number' ? 
maxWorkers : Number(maxWorkers); + + // Ensure LiteLLM backend dependencies are installed before running the CLI + if (resolvedIndexType !== 'normal' && resolvedEmbeddingBackend === 'litellm') { + const installResult = await ensureLiteLLMEmbedderReady(); + if (!installResult.success) { + return { success: false, error: installResult.error || 'Failed to prepare LiteLLM embedder', status: 500 }; + } + } + + // Build CLI arguments based on index type + // Use 'index init' subcommand (new CLI structure) + const args = ['index', 'init', targetPath, '--json']; + if (resolvedIndexType === 'normal') { + args.push('--no-embeddings'); + } else { + // Add embedding model selection for vector index (use --model, not --embedding-model) + args.push('--model', resolvedEmbeddingModel); + // Add embedding backend if not using default fastembed (use --backend, not --embedding-backend) + if (resolvedEmbeddingBackend && resolvedEmbeddingBackend !== 'fastembed') { + args.push('--backend', resolvedEmbeddingBackend); + } + // Add max workers for concurrent API calls (useful for litellm backend) + if (!Number.isNaN(resolvedMaxWorkers) && resolvedMaxWorkers > 1) { + args.push('--max-workers', String(resolvedMaxWorkers)); + } + } + + // Broadcast start event + broadcastToClients({ + type: 'CODEXLENS_INDEX_PROGRESS', + payload: { stage: 'start', message: 'Starting index...', percent: 0, path: targetPath, indexType: resolvedIndexType } + }); + + try { + const result = await executeCodexLens(args, { + cwd: targetPath, + timeout: 1800000, // 30 minutes for large codebases + onProgress: (progress: ProgressInfo) => { + broadcastToClients({ + type: 'CODEXLENS_INDEX_PROGRESS', + payload: { ...progress, path: targetPath } + }); + } + }); + + if (result.success) { + broadcastToClients({ + type: 'CODEXLENS_INDEX_PROGRESS', + payload: { stage: 'complete', message: 'Index complete', percent: 100, path: targetPath } + }); + + try { + const parsed = extractJSON(result.output ?? 
''); + return { success: true, result: parsed }; + } catch { + return { success: true, output: result.output ?? '' }; + } + } else { + broadcastToClients({ + type: 'CODEXLENS_INDEX_PROGRESS', + payload: { stage: 'error', message: result.error || 'Unknown error', percent: 0, path: targetPath } + }); + return { success: false, error: result.error, status: 500 }; + } + } catch (err: unknown) { + const message = err instanceof Error ? err.message : String(err); + broadcastToClients({ + type: 'CODEXLENS_INDEX_PROGRESS', + payload: { stage: 'error', message, percent: 0, path: targetPath } + }); + return { success: false, error: message, status: 500 }; + } + }); + return true; + } + + // API: Cancel CodexLens Indexing + if (pathname === '/api/codexlens/cancel' && req.method === 'POST') { + const result = cancelIndexing(); + + // Broadcast cancellation event + if (result.success) { + broadcastToClients({ + type: 'CODEXLENS_INDEX_PROGRESS', + payload: { stage: 'cancelled', message: 'Indexing cancelled by user', percent: 0 } + }); + } + + res.writeHead(result.success ? 200 : 400, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(result)); + return true; + } + + // API: Check if indexing is in progress + if (pathname === '/api/codexlens/indexing-status') { + const inProgress = isIndexingInProgress(); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: true, inProgress })); + return true; + } + + return false; +} + diff --git a/ccw/src/core/routes/codexlens/semantic-handlers.ts b/ccw/src/core/routes/codexlens/semantic-handlers.ts new file mode 100644 index 00000000..35e6568f --- /dev/null +++ b/ccw/src/core/routes/codexlens/semantic-handlers.ts @@ -0,0 +1,767 @@ +/** + * CodexLens semantic search + reranker + SPLADE handlers. 
+ */ + +import { + checkSemanticStatus, + checkVenvStatus, + executeCodexLens, + installSemantic, +} from '../../../tools/codex-lens.js'; +import type { GpuMode } from '../../../tools/codex-lens.js'; +import { loadLiteLLMApiConfig } from '../../../config/litellm-api-config-manager.js'; +import type { RouteContext } from '../types.js'; +import { extractJSON } from './utils.js'; + +export async function handleCodexLensSemanticRoutes(ctx: RouteContext): Promise { + const { pathname, url, req, res, initialPath, handlePostRequest } = ctx; + + // API: CodexLens Semantic Search Status + if (pathname === '/api/codexlens/semantic/status') { + const status = await checkSemanticStatus(); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(status)); + return true; + } + + // API: CodexLens Semantic Metadata List + if (pathname === '/api/codexlens/semantic/metadata') { + const offset = parseInt(url.searchParams.get('offset') || '0', 10); + const limit = parseInt(url.searchParams.get('limit') || '50', 10); + const tool = url.searchParams.get('tool') || ''; + const projectPath = url.searchParams.get('path') || initialPath; + + try { + const args = [ + 'semantic-list', + '--path', projectPath, + '--offset', offset.toString(), + '--limit', limit.toString(), + '--json' + ]; + if (tool) { + args.push('--tool', tool); + } + + const result = await executeCodexLens(args, { cwd: projectPath }); + + if (result.success) { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(result.output ?? ''); + } else { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: result.error })); + } + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? 
err.message : String(err) })); + } + return true; + } + + // API: CodexLens LLM Enhancement (run enhance command) + if (pathname === '/api/codexlens/enhance' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + const { path: projectPath, tool = 'gemini', batchSize = 5, timeoutMs = 300000 } = body as { + path?: unknown; + tool?: unknown; + batchSize?: unknown; + timeoutMs?: unknown; + }; + const targetPath = typeof projectPath === 'string' && projectPath.trim().length > 0 ? projectPath : initialPath; + const resolvedTool = typeof tool === 'string' && tool.trim().length > 0 ? tool : 'gemini'; + const resolvedBatchSize = typeof batchSize === 'number' ? batchSize : Number(batchSize); + const resolvedTimeoutMs = typeof timeoutMs === 'number' ? timeoutMs : Number(timeoutMs); + + try { + const args = ['enhance', targetPath, '--tool', resolvedTool, '--batch-size', String(resolvedBatchSize)]; + const timeout = !Number.isNaN(resolvedTimeoutMs) ? resolvedTimeoutMs + 30000 : 330000; + const result = await executeCodexLens(args, { cwd: targetPath, timeout }); + if (result.success) { + try { + const parsed = extractJSON(result.output ?? ''); + return { success: true, result: parsed }; + } catch { + return { success: true, output: result.output ?? '' }; + } + } else { + return { success: false, error: result.error, status: 500 }; + } + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? 
err.message : String(err), status: 500 }; + } + }); + return true; + } + + // API: CodexLens Search (FTS5 text search with mode support) + if (pathname === '/api/codexlens/search') { + const query = url.searchParams.get('query') || ''; + const limit = parseInt(url.searchParams.get('limit') || '20', 10); + const mode = url.searchParams.get('mode') || 'exact'; // exact, fuzzy, hybrid, vector + const maxContentLength = parseInt(url.searchParams.get('max_content_length') || '200', 10); + const extraFilesCount = parseInt(url.searchParams.get('extra_files_count') || '10', 10); + const projectPath = url.searchParams.get('path') || initialPath; + + if (!query) { + res.writeHead(400, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: 'Query parameter is required' })); + return true; + } + + try { + // Request more results to support split (full content + extra files) + const totalToFetch = limit + extraFilesCount; + // Use --method instead of deprecated --mode + const args = ['search', query, '--path', projectPath, '--limit', totalToFetch.toString(), '--method', mode, '--json']; + + const result = await executeCodexLens(args, { cwd: projectPath }); + + if (result.success) { + try { + const parsed = extractJSON(result.output ?? 
''); + const allResults = parsed.result?.results || []; + + // Truncate content and split results + const truncateContent = (content: string | null | undefined): string => { + if (!content) return ''; + if (content.length <= maxContentLength) return content; + return content.slice(0, maxContentLength) + '...'; + }; + + // Split results: first N with full content, rest as file paths only + const resultsWithContent = allResults.slice(0, limit).map((r: any) => ({ + ...r, + content: truncateContent(r.content || r.excerpt), + excerpt: truncateContent(r.excerpt || r.content), + })); + + const extraResults = allResults.slice(limit, limit + extraFilesCount); + const extraFiles = [...new Set(extraResults.map((r: any) => r.path || r.file))]; + + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ + success: true, + results: resultsWithContent, + extra_files: extraFiles.length > 0 ? extraFiles : undefined, + metadata: { + total: allResults.length, + limit, + max_content_length: maxContentLength, + extra_files_count: extraFilesCount, + }, + })); + } catch { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: true, results: [], output: result.output })); + } + } else { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: result.error })); + } + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? 
err.message : String(err) })); + } + return true; + } + + // API: CodexLens Search Files Only (return file paths only, with mode support) + if (pathname === '/api/codexlens/search_files') { + const query = url.searchParams.get('query') || ''; + const limit = parseInt(url.searchParams.get('limit') || '20', 10); + const mode = url.searchParams.get('mode') || 'exact'; // exact, fuzzy, hybrid, vector + const projectPath = url.searchParams.get('path') || initialPath; + + if (!query) { + res.writeHead(400, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: 'Query parameter is required' })); + return true; + } + + try { + // Use --method instead of deprecated --mode + const args = ['search', query, '--path', projectPath, '--limit', limit.toString(), '--method', mode, '--files-only', '--json']; + + const result = await executeCodexLens(args, { cwd: projectPath }); + + if (result.success) { + try { + const parsed = extractJSON(result.output ?? ''); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: true, ...parsed.result })); + } catch { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: true, files: [], output: result.output })); + } + } else { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: result.error })); + } + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? 
err.message : String(err) })); + } + return true; + } + + // API: CodexLens Symbol Search (search for symbols by name) + if (pathname === '/api/codexlens/symbol') { + const query = url.searchParams.get('query') || ''; + const file = url.searchParams.get('file'); + const limit = parseInt(url.searchParams.get('limit') || '20', 10); + const projectPath = url.searchParams.get('path') || initialPath; + + if (!query && !file) { + res.writeHead(400, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: 'Either query or file parameter is required' })); + return true; + } + + try { + let args; + if (file) { + // Get symbols from a specific file + args = ['symbol', '--file', file, '--json']; + } else { + // Search for symbols by name + args = ['symbol', query, '--path', projectPath, '--limit', limit.toString(), '--json']; + } + + const result = await executeCodexLens(args, { cwd: projectPath }); + + if (result.success) { + try { + const parsed = extractJSON(result.output ?? ''); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: true, ...parsed.result })); + } catch { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: true, symbols: [], output: result.output })); + } + } else { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: result.error })); + } + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? 
err.message : String(err) })); + } + return true; + } + + // API: CodexLens Semantic Search Install (with GPU mode support) + if (pathname === '/api/codexlens/semantic/install' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + try { + // Get GPU mode from request body, default to 'cpu' + const { gpuMode } = body as { gpuMode?: unknown }; + const resolvedGpuModeCandidate = typeof gpuMode === 'string' && gpuMode.trim().length > 0 ? gpuMode : 'cpu'; + const validModes: GpuMode[] = ['cpu', 'cuda', 'directml']; + + if (!validModes.includes(resolvedGpuModeCandidate as GpuMode)) { + return { + success: false, + error: `Invalid GPU mode: ${resolvedGpuModeCandidate}. Valid modes: ${validModes.join(', ')}`, + status: 400 + }; + } + + const resolvedGpuMode = resolvedGpuModeCandidate as GpuMode; + const result = await installSemantic(resolvedGpuMode); + if (result.success) { + const status = await checkSemanticStatus(); + const modeDescriptions = { + cpu: 'CPU (ONNX Runtime)', + cuda: 'NVIDIA CUDA GPU', + directml: 'Windows DirectML GPU' + }; + return { + success: true, + message: `Semantic search installed successfully with ${modeDescriptions[resolvedGpuMode]}`, + gpuMode: resolvedGpuMode, + ...status + }; + } else { + return { success: false, error: result.error, status: 500 }; + } + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? 
err.message : String(err), status: 500 };
+      }
+    });
+    return true;
+  }
+
+  // ============================================================
+  // RERANKER CONFIGURATION ENDPOINTS
+  // ============================================================
+
+  // API: Get Reranker Configuration
+  if (pathname === '/api/codexlens/reranker/config' && req.method === 'GET') {
+    try {
+      const venvStatus = await checkVenvStatus();
+
+      // Default reranker config (matches fastembed default)
+      const rerankerConfig = {
+        backend: 'fastembed',
+        model_name: 'Xenova/ms-marco-MiniLM-L-6-v2',
+        api_provider: 'siliconflow',
+        api_key_set: false,
+        // Keep in sync with validBackends in the POST handler; 'fastembed'
+        // is the default backend, so it must be listed here or UI dropdowns
+        // cannot display the current selection.
+        available_backends: ['onnx', 'api', 'litellm', 'legacy', 'fastembed'],
+        api_providers: ['siliconflow', 'cohere', 'jina'],
+        litellm_endpoints: [] as string[],
+        config_source: 'default'
+      };
+
+      // Load LiteLLM endpoints for dropdown
+      try {
+        const litellmConfig = loadLiteLLMApiConfig(initialPath);
+        if (litellmConfig.endpoints && Array.isArray(litellmConfig.endpoints)) {
+          rerankerConfig.litellm_endpoints = litellmConfig.endpoints.map(
+            (ep: any) => ep.alias || ep.name || ep.baseUrl
+          ).filter(Boolean);
+        }
+      } catch {
+        // LiteLLM config not available, continue with empty endpoints
+      }
+
+      // If CodexLens is installed, try to get actual config
+      if (venvStatus.ready) {
+        try {
+          const result = await executeCodexLens(['config', '--json']);
+          if (result.success) {
+            const config = extractJSON(result.output ??
''); + if (config.success && config.result) { + // Map config values + if (config.result.reranker_backend) { + rerankerConfig.backend = config.result.reranker_backend; + rerankerConfig.config_source = 'codexlens'; + } + if (config.result.reranker_model) { + rerankerConfig.model_name = config.result.reranker_model; + } + if (config.result.reranker_api_provider) { + rerankerConfig.api_provider = config.result.reranker_api_provider; + } + // Check if API key is set (from env) + if (process.env.RERANKER_API_KEY) { + rerankerConfig.api_key_set = true; + } + } + } + } catch (e) { + console.error('[CodexLens] Failed to get reranker config:', e); + } + } + + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: true, ...rerankerConfig })); + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) })); + } + return true; + } + + // API: Set Reranker Configuration + if (pathname === '/api/codexlens/reranker/config' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + const { backend, model_name, api_provider, api_key, litellm_endpoint } = body as { + backend?: unknown; + model_name?: unknown; + api_provider?: unknown; + api_key?: unknown; + litellm_endpoint?: unknown; + }; + const resolvedBackend = typeof backend === 'string' && backend.trim().length > 0 ? backend : undefined; + const resolvedModelName = typeof model_name === 'string' && model_name.trim().length > 0 ? model_name : undefined; + const resolvedApiProvider = typeof api_provider === 'string' && api_provider.trim().length > 0 ? api_provider : undefined; + const resolvedApiKey = typeof api_key === 'string' && api_key.trim().length > 0 ? api_key : undefined; + const resolvedLiteLLMEndpoint = + typeof litellm_endpoint === 'string' && litellm_endpoint.trim().length > 0 ? 
litellm_endpoint : undefined; + + // Validate backend + const validBackends = ['onnx', 'api', 'litellm', 'legacy', 'fastembed']; + if (resolvedBackend && !validBackends.includes(resolvedBackend)) { + return { + success: false, + error: `Invalid backend: ${resolvedBackend}. Valid options: ${validBackends.join(', ')}`, + status: 400 + }; + } + + // Validate api_provider + const validProviders = ['siliconflow', 'cohere', 'jina']; + if (resolvedApiProvider && !validProviders.includes(resolvedApiProvider)) { + return { + success: false, + error: `Invalid api_provider: ${resolvedApiProvider}. Valid options: ${validProviders.join(', ')}`, + status: 400 + }; + } + + try { + const updates: string[] = []; + + // Set backend + if (resolvedBackend) { + const result = await executeCodexLens(['config', 'set', 'reranker_backend', resolvedBackend, '--json']); + if (result.success) updates.push('backend'); + } + + // Set model + if (resolvedModelName) { + const result = await executeCodexLens(['config', 'set', 'reranker_model', resolvedModelName, '--json']); + if (result.success) updates.push('model_name'); + } + + // Set API provider + if (resolvedApiProvider) { + const result = await executeCodexLens(['config', 'set', 'reranker_api_provider', resolvedApiProvider, '--json']); + if (result.success) updates.push('api_provider'); + } + + // Set LiteLLM endpoint + if (resolvedLiteLLMEndpoint) { + const result = await executeCodexLens([ + 'config', + 'set', + 'reranker_litellm_endpoint', + resolvedLiteLLMEndpoint, + '--json' + ]); + if (result.success) updates.push('litellm_endpoint'); + } + + // Handle API key - write to .env file or environment + if (resolvedApiKey) { + // For security, we store in process.env for the current session + // In production, this should be written to a secure .env file + process.env.RERANKER_API_KEY = resolvedApiKey; + updates.push('api_key'); + } + + return { + success: true, + message: `Updated: ${updates.join(', ')}`, + updated_fields: updates + }; + } 
catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 }; + } + }); + return true; + } + + // ============================================================ + // RERANKER MODEL MANAGEMENT ENDPOINTS + // ============================================================ + + // API: List Reranker Models (list available reranker models) + if (pathname === '/api/codexlens/reranker/models' && req.method === 'GET') { + try { + // Check if CodexLens is installed first + const venvStatus = await checkVenvStatus(); + if (!venvStatus.ready) { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: 'CodexLens not installed' })); + return true; + } + const result = await executeCodexLens(['reranker-model-list', '--json']); + if (result.success) { + try { + const parsed = extractJSON(result.output ?? ''); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(parsed)); + } catch { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: true, result: { models: [] }, output: result.output })); + } + } else { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: result.error })); + } + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) })); + } + return true; + } + + // API: Download Reranker Model (download reranker model by profile) + if (pathname === '/api/codexlens/reranker/models/download' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + const { profile } = body as { profile?: unknown }; + const resolvedProfile = typeof profile === 'string' && profile.trim().length > 0 ? 
profile.trim() : undefined; + + if (!resolvedProfile) { + return { success: false, error: 'profile is required', status: 400 }; + } + + try { + const result = await executeCodexLens(['reranker-model-download', resolvedProfile, '--json'], { timeout: 600000 }); // 10 min for download + if (result.success) { + try { + const parsed = extractJSON(result.output ?? ''); + return { success: true, ...parsed }; + } catch { + return { success: true, output: result.output }; + } + } else { + return { success: false, error: result.error, status: 500 }; + } + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 }; + } + }); + return true; + } + + // API: Delete Reranker Model (delete reranker model by profile) + if (pathname === '/api/codexlens/reranker/models/delete' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + const { profile } = body as { profile?: unknown }; + const resolvedProfile = typeof profile === 'string' && profile.trim().length > 0 ? profile.trim() : undefined; + + if (!resolvedProfile) { + return { success: false, error: 'profile is required', status: 400 }; + } + + try { + const result = await executeCodexLens(['reranker-model-delete', resolvedProfile, '--json']); + if (result.success) { + try { + const parsed = extractJSON(result.output ?? ''); + return { success: true, ...parsed }; + } catch { + return { success: true, output: result.output }; + } + } else { + return { success: false, error: result.error, status: 500 }; + } + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? 
err.message : String(err), status: 500 }; + } + }); + return true; + } + + // API: Reranker Model Info (get reranker model info by profile) + if (pathname === '/api/codexlens/reranker/models/info' && req.method === 'GET') { + const profile = url.searchParams.get('profile'); + + if (!profile) { + res.writeHead(400, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: 'profile parameter is required' })); + return true; + } + + try { + const result = await executeCodexLens(['reranker-model-info', profile, '--json']); + if (result.success) { + try { + const parsed = extractJSON(result.output ?? ''); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(parsed)); + } catch { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: 'Failed to parse response' })); + } + } else { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: result.error })); + } + } catch (err: unknown) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ success: false, error: err instanceof Error ? 
err.message : String(err) })); + } + return true; + } + + // ============================================================ + // SPLADE ENDPOINTS + // ============================================================ + + // API: SPLADE Status - Check if SPLADE is available and installed + if (pathname === '/api/codexlens/splade/status') { + try { + // Check if CodexLens is installed first + const venvStatus = await checkVenvStatus(); + if (!venvStatus.ready) { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ + available: false, + installed: false, + model: 'naver/splade-cocondenser-ensembledistil', + error: 'CodexLens not installed' + })); + return true; + } + + // Check SPLADE availability using Python check + const result = await executeCodexLens(['python', '-c', + 'from codexlens.semantic.splade_encoder import check_splade_available; ok, err = check_splade_available(); print(\"OK\" if ok else err)' + ]); + + const output = result.output ?? ''; + const available = output.includes('OK'); + + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ + available, + installed: available, + model: 'naver/splade-cocondenser-ensembledistil', + error: available ? null : output.trim() + })); + } catch (err: unknown) { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ + available: false, + installed: false, + model: 'naver/splade-cocondenser-ensembledistil', + error: err instanceof Error ? err.message : String(err) + })); + } + return true; + } + + // API: SPLADE Install - Install SPLADE dependencies + if (pathname === '/api/codexlens/splade/install' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + try { + const { gpu } = body as { gpu?: unknown }; + const useGpu = typeof gpu === 'boolean' ? gpu : false; + const packageName = useGpu ? 
'codex-lens[splade-gpu]' : 'codex-lens[splade]';
+
+        // Use pip to install the SPLADE extras.
+        // NOTE: this module uses ESM syntax throughout, so CommonJS
+        // require() is not defined here -- use a dynamic import instead
+        // (also works when transpiled to CJS).
+        const { promisify } = await import('util');
+        const { execFile } = await import('child_process');
+        const execFilePromise = promisify(execFile);
+
+        const result = await execFilePromise('pip', ['install', packageName], {
+          timeout: 600000 // 10 minutes
+        });
+
+        return {
+          success: true,
+          message: `SPLADE installed successfully (${useGpu ? 'GPU' : 'CPU'} mode)`,
+          output: result.stdout
+        };
+      } catch (err: unknown) {
+        const message = err instanceof Error ? err.message : String(err);
+        const stderr = (err as { stderr?: unknown })?.stderr;
+        return {
+          success: false,
+          error: message,
+          stderr: typeof stderr === 'string' ? stderr : undefined,
+          status: 500
+        };
+      }
+    });
+    return true;
+  }
+
+  // API: SPLADE Index Status - Check if SPLADE index exists for a project
+  if (pathname === '/api/codexlens/splade/index-status') {
+    try {
+      const projectPath = url.searchParams.get('path');
+      if (!projectPath) {
+        res.writeHead(400, { 'Content-Type': 'application/json' });
+        res.end(JSON.stringify({ success: false, error: 'Missing path parameter' }));
+        return true;
+      }
+
+      // Check if CodexLens is installed first
+      const venvStatus = await checkVenvStatus();
+      if (!venvStatus.ready) {
+        res.writeHead(200, { 'Content-Type': 'application/json' });
+        res.end(JSON.stringify({ exists: false, error: 'CodexLens not installed' }));
+        return true;
+      }
+
+      const { join } = await import('path');
+      const indexDb = join(projectPath, '.codexlens', '_index.db');
+
+      // Use Python to check SPLADE index status.
+      // Escape every single backslash (Windows path separators) for the
+      // embedded Python string literal; the previous /\\\\/g pattern only
+      // matched already-doubled backslashes and left raw '\' sequences
+      // that Python rejects as invalid escapes.
+      const pythonCode = `
+from codexlens.storage.splade_index import SpladeIndex
+from pathlib import Path
+try:
+    idx = SpladeIndex(Path(\"${indexDb.replace(/\\/g, '\\\\')}\"))
+    if idx.has_index():
+        stats = idx.get_stats()
+        meta = idx.get_metadata()
+        model = meta.get('model_name', '') if meta else ''
+        print(f\"OK|{stats['unique_chunks']}|{stats['total_postings']}|{model}\")
+    else:
+        print(\"NO_INDEX\")
+except Exception as e: + print(f\"ERROR|{str(e)}\") +`; + + const result = await executeCodexLens(['python', '-c', pythonCode]); + + const output = result.output ?? ''; + if (output.startsWith('OK|')) { + const parts = output.trim().split('|'); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ + exists: true, + chunks: parseInt(parts[1]), + postings: parseInt(parts[2]), + model: parts[3] + })); + } else if (output.startsWith('ERROR|')) { + const errorMsg = output.substring(6).trim(); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ exists: false, error: errorMsg })); + } else { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ exists: false })); + } + } catch (err: unknown) { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ exists: false, error: err instanceof Error ? err.message : String(err) })); + } + return true; + } + + // API: SPLADE Index Rebuild - Rebuild SPLADE index for a project + if (pathname === '/api/codexlens/splade/rebuild' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + const { path: projectPath } = body as { path?: unknown }; + const resolvedProjectPath = typeof projectPath === 'string' && projectPath.trim().length > 0 ? 
projectPath : undefined; + + if (!resolvedProjectPath) { + return { success: false, error: 'Missing path parameter', status: 400 }; + } + + try { + // Use 'index splade' instead of deprecated 'splade-index' + const result = await executeCodexLens(['index', 'splade', resolvedProjectPath, '--rebuild'], { + cwd: resolvedProjectPath, + timeout: 1800000 // 30 minutes for large codebases + }); + + if (result.success) { + return { + success: true, + message: 'SPLADE index rebuilt successfully', + output: result.output + }; + } else { + return { + success: false, + error: result.error || 'Failed to rebuild SPLADE index', + output: result.output, + status: 500 + }; + } + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 }; + } + }); + return true; + } + + return false; +} diff --git a/ccw/src/core/routes/codexlens/utils.ts b/ccw/src/core/routes/codexlens/utils.ts new file mode 100644 index 00000000..f2bd14c0 --- /dev/null +++ b/ccw/src/core/routes/codexlens/utils.ts @@ -0,0 +1,96 @@ +/** + * CodexLens route utilities. + * + * CodexLens CLI can emit logging + ANSI escapes even with --json, so helpers + * here normalize output for reliable JSON parsing. + */ + +/** + * Strip ANSI color codes from string. + * Rich library adds color codes even with --json flag. + */ +export function stripAnsiCodes(str: string): string { + // ANSI escape code pattern: \x1b[...m or \x1b]... + return str.replace(/\x1b\[[0-9;]*m/g, '') + .replace(/\x1b\][0-9;]*\x07/g, '') + .replace(/\x1b\][^\x07]*\x07/g, ''); +} + +/** + * Format file size to human readable string. + */ +export function formatSize(bytes: number): string { + if (bytes === 0) return '0 B'; + const units = ['B', 'KB', 'MB', 'GB', 'TB']; + const k = 1024; + const i = Math.floor(Math.log(bytes) / Math.log(k)); + const size = parseFloat((bytes / Math.pow(k, i)).toFixed(i < 2 ? 
0 : 1)); + return size + ' ' + units[i]; +} + +/** + * Extract JSON from CLI output that may contain logging messages. + * CodexLens CLI outputs logs like "INFO ..." before the JSON. + * Also strips ANSI color codes that Rich library adds. + * Handles trailing content after JSON (e.g., "INFO: Done" messages). + */ +export function extractJSON(output: string): any { + // Strip ANSI color codes first + const cleanOutput = stripAnsiCodes(output); + + // Find the first { or [ character (start of JSON) + const jsonStart = cleanOutput.search(/[{\[]/); + if (jsonStart === -1) { + throw new Error('No JSON found in output'); + } + + const startChar = cleanOutput[jsonStart]; + const endChar = startChar === '{' ? '}' : ']'; + + // Find matching closing brace/bracket using a simple counter + let depth = 0; + let inString = false; + let escapeNext = false; + let jsonEnd = -1; + + for (let i = jsonStart; i < cleanOutput.length; i++) { + const char = cleanOutput[i]; + + if (escapeNext) { + escapeNext = false; + continue; + } + + if (char === '\\' && inString) { + escapeNext = true; + continue; + } + + if (char === '"') { + inString = !inString; + continue; + } + + if (!inString) { + if (char === startChar) { + depth++; + } else if (char === endChar) { + depth--; + if (depth === 0) { + jsonEnd = i + 1; + break; + } + } + } + } + + if (jsonEnd === -1) { + // Fallback: try to parse from start to end (original behavior) + const jsonString = cleanOutput.substring(jsonStart); + return JSON.parse(jsonString); + } + + const jsonString = cleanOutput.substring(jsonStart, jsonEnd); + return JSON.parse(jsonString); +} + diff --git a/ccw/src/core/routes/codexlens/watcher-handlers.ts b/ccw/src/core/routes/codexlens/watcher-handlers.ts new file mode 100644 index 00000000..606e58e0 --- /dev/null +++ b/ccw/src/core/routes/codexlens/watcher-handlers.ts @@ -0,0 +1,265 @@ +/** + * CodexLens file watcher handlers. + * + * Maintains watcher process state across requests to support dashboard controls. 
+ */ + +import { + checkVenvStatus, + executeCodexLens, + getVenvPythonPath, +} from '../../../tools/codex-lens.js'; +import type { RouteContext } from '../types.js'; +import { extractJSON, stripAnsiCodes } from './utils.js'; + +// File watcher state (persisted across requests) +let watcherProcess: any = null; +let watcherStats = { + running: false, + root_path: '', + events_processed: 0, + start_time: null as Date | null +}; + +export async function stopWatcherForUninstall(): Promise { + if (!watcherStats.running || !watcherProcess) return; + + try { + watcherProcess.kill('SIGTERM'); + await new Promise(resolve => setTimeout(resolve, 500)); + if (watcherProcess && !watcherProcess.killed) { + watcherProcess.kill('SIGKILL'); + } + } catch { + // Ignore errors stopping watcher + } + + watcherStats = { + running: false, + root_path: '', + events_processed: 0, + start_time: null + }; + watcherProcess = null; +} + +/** + * Handle CodexLens watcher routes + * @returns true if route was handled, false otherwise + */ +export async function handleCodexLensWatcherRoutes(ctx: RouteContext): Promise { + const { pathname, req, res, initialPath, handlePostRequest, broadcastToClients } = ctx; + + // API: Get File Watcher Status + if (pathname === '/api/codexlens/watch/status') { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ + success: true, + running: watcherStats.running, + root_path: watcherStats.root_path, + events_processed: watcherStats.events_processed, + start_time: watcherStats.start_time?.toISOString() || null, + uptime_seconds: watcherStats.start_time + ? 
Math.floor((Date.now() - watcherStats.start_time.getTime()) / 1000) + : 0 + })); + return true; + } + + // API: Start File Watcher + if (pathname === '/api/codexlens/watch/start' && req.method === 'POST') { + handlePostRequest(req, res, async (body) => { + const { path: watchPath, debounce_ms = 1000 } = body as { path?: unknown; debounce_ms?: unknown }; + const targetPath = typeof watchPath === 'string' && watchPath.trim().length > 0 ? watchPath : initialPath; + const resolvedDebounceMs = typeof debounce_ms === 'number' ? debounce_ms : Number(debounce_ms); + const debounceMs = !Number.isNaN(resolvedDebounceMs) && resolvedDebounceMs > 0 ? resolvedDebounceMs : 1000; + + if (watcherStats.running) { + return { success: false, error: 'Watcher already running', status: 400 }; + } + + try { + const { spawn } = await import('child_process'); + const { existsSync, statSync } = await import('fs'); + + // Validate path exists and is a directory + if (!existsSync(targetPath)) { + return { success: false, error: `Path does not exist: ${targetPath}`, status: 400 }; + } + const pathStat = statSync(targetPath); + if (!pathStat.isDirectory()) { + return { success: false, error: `Path is not a directory: ${targetPath}`, status: 400 }; + } + + // Get the codexlens CLI path + const venvStatus = await checkVenvStatus(); + if (!venvStatus.ready) { + return { success: false, error: 'CodexLens not installed', status: 400 }; + } + + // Verify directory is indexed before starting watcher + try { + const statusResult = await executeCodexLens(['projects', 'list', '--json']); + if (statusResult.success && statusResult.output) { + const parsed = extractJSON(statusResult.output); + const projects = parsed.result || parsed || []; + const normalizedTarget = targetPath.toLowerCase().replace(/\\/g, '/'); + const isIndexed = Array.isArray(projects) && projects.some((p: { source_root?: string }) => + p.source_root && p.source_root.toLowerCase().replace(/\\/g, '/') === normalizedTarget + ); + if 
(!isIndexed) { + return { + success: false, + error: `Directory is not indexed: ${targetPath}. Run 'codexlens init' first.`, + status: 400 + }; + } + } + } catch (err) { + console.warn('[CodexLens] Could not verify index status:', err); + // Continue anyway - watcher will fail with proper error if not indexed + } + + // Spawn watch process using Python (no shell: true for security) + // CodexLens is a Python package, must run via python -m codexlens + const pythonPath = getVenvPythonPath(); + const args = ['-m', 'codexlens', 'watch', targetPath, '--debounce', String(debounceMs)]; + watcherProcess = spawn(pythonPath, args, { + cwd: targetPath, + stdio: ['ignore', 'pipe', 'pipe'], + env: { ...process.env } + }); + + watcherStats = { + running: true, + root_path: targetPath, + events_processed: 0, + start_time: new Date() + }; + + // Capture stderr for error messages (capped at 4KB to prevent memory leak) + const MAX_STDERR_SIZE = 4096; + let stderrBuffer = ''; + if (watcherProcess.stderr) { + watcherProcess.stderr.on('data', (data: Buffer) => { + stderrBuffer += data.toString(); + // Cap buffer size to prevent memory leak in long-running watchers + if (stderrBuffer.length > MAX_STDERR_SIZE) { + stderrBuffer = stderrBuffer.slice(-MAX_STDERR_SIZE); + } + }); + } + + // Handle process output for event counting + if (watcherProcess.stdout) { + watcherProcess.stdout.on('data', (data: Buffer) => { + const output = data.toString(); + // Count processed events from output + const matches = output.match(/Processed \d+ events?/g); + if (matches) { + watcherStats.events_processed += matches.length; + } + }); + } + + // Handle spawn errors (e.g., ENOENT) + watcherProcess.on('error', (err: Error) => { + console.error(`[CodexLens] Watcher spawn error: ${err.message}`); + watcherStats.running = false; + watcherProcess = null; + broadcastToClients({ + type: 'CODEXLENS_WATCHER_STATUS', + payload: { running: false, error: `Spawn error: ${err.message}` } + }); + }); + + // Handle 
process exit + watcherProcess.on('exit', (code: number) => { + watcherStats.running = false; + watcherProcess = null; + console.log(`[CodexLens] Watcher exited with code ${code}`); + + // Broadcast error if exited with non-zero code + if (code !== 0) { + const errorMsg = stderrBuffer.trim() || `Exited with code ${code}`; + const cleanError = stripAnsiCodes(errorMsg); + broadcastToClients({ + type: 'CODEXLENS_WATCHER_STATUS', + payload: { running: false, error: cleanError } + }); + } else { + broadcastToClients({ + type: 'CODEXLENS_WATCHER_STATUS', + payload: { running: false } + }); + } + }); + + // Broadcast watcher started + broadcastToClients({ + type: 'CODEXLENS_WATCHER_STATUS', + payload: { running: true, path: targetPath } + }); + + return { + success: true, + message: 'Watcher started', + path: targetPath, + pid: watcherProcess.pid + }; + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 }; + } + }); + return true; + } + + // API: Stop File Watcher + if (pathname === '/api/codexlens/watch/stop' && req.method === 'POST') { + handlePostRequest(req, res, async () => { + if (!watcherStats.running || !watcherProcess) { + return { success: false, error: 'Watcher not running', status: 400 }; + } + + try { + watcherProcess.kill('SIGTERM'); + await new Promise(resolve => setTimeout(resolve, 500)); + if (watcherProcess && !watcherProcess.killed) { + watcherProcess.kill('SIGKILL'); + } + + const finalStats = { + events_processed: watcherStats.events_processed, + uptime_seconds: watcherStats.start_time + ? 
Math.floor((Date.now() - watcherStats.start_time.getTime()) / 1000) + : 0 + }; + + watcherStats = { + running: false, + root_path: '', + events_processed: 0, + start_time: null + }; + watcherProcess = null; + + broadcastToClients({ + type: 'CODEXLENS_WATCHER_STATUS', + payload: { running: false } + }); + + return { + success: true, + message: 'Watcher stopped', + ...finalStats + }; + } catch (err: unknown) { + return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 }; + } + }); + return true; + } + + return false; +} + diff --git a/ccw/src/core/routes/discovery-routes.ts b/ccw/src/core/routes/discovery-routes.ts index 8ba997f2..333b94ca 100644 --- a/ccw/src/core/routes/discovery-routes.ts +++ b/ccw/src/core/routes/discovery-routes.ts @@ -1,4 +1,3 @@ -// @ts-nocheck /** * Discovery Routes Module * @@ -24,19 +23,9 @@ * - PATCH /api/discoveries/:id/findings/:fid - Update finding status * - DELETE /api/discoveries/:id - Delete discovery session */ -import type { IncomingMessage, ServerResponse } from 'http'; import { readFileSync, existsSync, writeFileSync, mkdirSync, readdirSync, rmSync } from 'fs'; import { join } from 'path'; - -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; -} +import type { RouteContext } from './types.js'; // ========== Helper Functions ========== diff --git a/ccw/src/core/routes/files-routes.ts b/ccw/src/core/routes/files-routes.ts index ff09d919..c74d46fa 100644 --- a/ccw/src/core/routes/files-routes.ts +++ b/ccw/src/core/routes/files-routes.ts @@ -1,21 +1,11 @@ -// @ts-nocheck /** * Files Routes Module * Handles all file browsing related API endpoints */ -import type { IncomingMessage, ServerResponse } from 'http'; import { existsSync, readFileSync, readdirSync, 
statSync } from 'fs'; import { join } from 'path'; - -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; -} +import { validatePath as validateAllowedPath } from '../../utils/path-validator.js'; +import type { RouteContext } from './types.js'; // ======================================== // Constants @@ -78,6 +68,39 @@ const EXT_TO_LANGUAGE = { '.svelte': 'html' }; +interface ExplorerFileEntry { + name: string; + type: 'directory' | 'file'; + path: string; + hasClaudeMd?: boolean; +} + +interface ExplorerDirectoryFilesResult { + path?: string; + files: ExplorerFileEntry[]; + gitignorePatterns?: string[]; + error?: string; +} + +interface ExplorerFileContentResult { + error?: string; + content?: string; + language?: string; + isMarkdown?: boolean; + fileName?: string; + path?: string; + size?: number; + lines?: number; +} + +interface UpdateClaudeMdResult { + success?: boolean; + error?: string; + message?: string; + output?: string; + path?: string; +} + // ======================================== // Helper Functions // ======================================== @@ -87,7 +110,7 @@ const EXT_TO_LANGUAGE = { * @param {string} gitignorePath - Path to .gitignore file * @returns {string[]} Array of gitignore patterns */ -function parseGitignore(gitignorePath) { +function parseGitignore(gitignorePath: string): string[] { try { if (!existsSync(gitignorePath)) return []; const content = readFileSync(gitignorePath, 'utf8'); @@ -108,7 +131,7 @@ function parseGitignore(gitignorePath) { * @param {boolean} isDirectory - Whether the entry is a directory * @returns {boolean} */ -function shouldIgnore(name, patterns, isDirectory) { +function shouldIgnore(name: string, patterns: string[], isDirectory: boolean): boolean { // Always exclude certain 
directories if (isDirectory && EXPLORER_EXCLUDE_DIRS.includes(name)) { return true; @@ -155,7 +178,7 @@ function shouldIgnore(name, patterns, isDirectory) { * @param {string} dirPath - Directory path to list * @returns {Promise} */ -async function listDirectoryFiles(dirPath) { +async function listDirectoryFiles(dirPath: string): Promise { try { // Normalize path let normalizedPath = dirPath.replace(/\\/g, '/'); @@ -178,7 +201,7 @@ async function listDirectoryFiles(dirPath) { // Read directory entries const entries = readdirSync(normalizedPath, { withFileTypes: true }); - const files = []; + const files: ExplorerFileEntry[] = []; for (const entry of entries) { const isDirectory = entry.isDirectory(); @@ -188,7 +211,7 @@ async function listDirectoryFiles(dirPath) { } const entryPath = join(normalizedPath, entry.name); - const fileInfo = { + const fileInfo: ExplorerFileEntry = { name: entry.name, type: isDirectory ? 'directory' : 'file', path: entryPath.replace(/\\/g, '/') @@ -226,7 +249,7 @@ async function listDirectoryFiles(dirPath) { * @param {string} filePath - Path to file * @returns {Promise} */ -async function getFileContent(filePath) { +async function getFileContent(filePath: string): Promise { try { // Normalize path let normalizedPath = filePath.replace(/\\/g, '/'); @@ -251,9 +274,11 @@ async function getFileContent(filePath) { // Read file content const content = readFileSync(normalizedPath, 'utf8'); const ext = normalizedPath.substring(normalizedPath.lastIndexOf('.')).toLowerCase(); - const language = EXT_TO_LANGUAGE[ext] || 'plaintext'; + const language = Object.prototype.hasOwnProperty.call(EXT_TO_LANGUAGE, ext) + ? EXT_TO_LANGUAGE[ext as keyof typeof EXT_TO_LANGUAGE] + : 'plaintext'; const isMarkdown = ext === '.md' || ext === '.markdown'; - const fileName = normalizedPath.split('/').pop(); + const fileName = normalizedPath.split('/').pop() ?? 
normalizedPath; return { content, @@ -277,7 +302,7 @@ async function getFileContent(filePath) { * @param {string} strategy - Update strategy (single-layer, multi-layer) * @returns {Promise} */ -async function triggerUpdateClaudeMd(targetPath, tool, strategy) { +async function triggerUpdateClaudeMd(targetPath: string, tool: string, strategy: string): Promise { const { spawn } = await import('child_process'); // Normalize path @@ -303,7 +328,7 @@ async function triggerUpdateClaudeMd(targetPath, tool, strategy) { console.log(`[Explorer] Running async: ccw tool exec update_module_claude with ${tool} (${strategy})`); - return new Promise((resolve) => { + return new Promise((resolve) => { const isWindows = process.platform === 'win32'; // Spawn the process @@ -316,34 +341,39 @@ async function triggerUpdateClaudeMd(targetPath, tool, strategy) { let stdout = ''; let stderr = ''; - child.stdout.on('data', (data) => { + child.stdout.on('data', (data: Buffer) => { stdout += data.toString(); }); - child.stderr.on('data', (data) => { + child.stderr.on('data', (data: Buffer) => { stderr += data.toString(); }); - child.on('close', (code) => { + child.on('close', (code: number | null) => { if (code === 0) { // Parse the JSON output from the tool - let result; + let result: unknown; try { result = JSON.parse(stdout); } catch { result = { output: stdout }; } - if (result.success === false || result.error) { + const parsed = typeof result === 'object' && result !== null ? (result as Record) : null; + const parsedSuccess = typeof parsed?.success === 'boolean' ? parsed.success : undefined; + const parsedError = typeof parsed?.error === 'string' ? parsed.error : undefined; + const parsedMessage = typeof parsed?.message === 'string' ? 
parsed.message : undefined; + + if (parsedSuccess === false || parsedError) { resolve({ success: false, - error: result.error || result.message || 'Update failed', + error: parsedError || parsedMessage || 'Update failed', output: stdout }); } else { resolve({ success: true, - message: result.message || `CLAUDE.md updated successfully using ${tool} (${strategy})`, + message: parsedMessage || `CLAUDE.md updated successfully using ${tool} (${strategy})`, output: stdout, path: normalizedPath }); @@ -357,11 +387,11 @@ async function triggerUpdateClaudeMd(targetPath, tool, strategy) { } }); - child.on('error', (error) => { + child.on('error', (error: unknown) => { console.error('Error spawning process:', error); resolve({ success: false, - error: (error as Error).message, + error: error instanceof Error ? error.message : String(error), output: '' }); }); @@ -392,9 +422,19 @@ export async function handleFilesRoutes(ctx: RouteContext): Promise { // API: List directory files with .gitignore filtering (Explorer view) if (pathname === '/api/files') { const dirPath = url.searchParams.get('path') || initialPath; - const filesData = await listDirectoryFiles(dirPath); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(filesData)); + + try { + const validatedDir = await validateAllowedPath(dirPath, { mustExist: true, allowedDirectories: [initialPath] }); + const filesData = await listDirectoryFiles(validatedDir); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(filesData)); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Files] Path validation failed: ${message}`); + res.writeHead(status, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: status === 403 ? 
'Access denied' : 'Invalid path', files: [] })); + } return true; } @@ -406,20 +446,52 @@ export async function handleFilesRoutes(ctx: RouteContext): Promise { res.end(JSON.stringify({ error: 'File path is required' })); return true; } - const fileData = await getFileContent(filePath); - res.writeHead(fileData.error ? 404 : 200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(fileData)); + + try { + const validatedFile = await validateAllowedPath(filePath, { mustExist: true, allowedDirectories: [initialPath] }); + const fileData = await getFileContent(validatedFile); + res.writeHead(fileData.error ? 404 : 200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(fileData)); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Files] Path validation failed: ${message}`); + res.writeHead(status, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: status === 403 ? 
'Access denied' : 'Invalid path' })); + } return true; } // API: Update CLAUDE.md using CLI tools (Explorer view) if (pathname === '/api/update-claude-md' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { path: targetPath, tool = 'gemini', strategy = 'single-layer' } = body; - if (!targetPath) { + if (typeof body !== 'object' || body === null) { + return { error: 'Invalid request body', status: 400 }; + } + + const { + path: targetPath, + tool = 'gemini', + strategy = 'single-layer' + } = body as { path?: unknown; tool?: unknown; strategy?: unknown }; + + if (typeof targetPath !== 'string' || targetPath.trim().length === 0) { return { error: 'path is required', status: 400 }; } - return await triggerUpdateClaudeMd(targetPath, tool, strategy); + + try { + const validatedPath = await validateAllowedPath(targetPath, { mustExist: true, allowedDirectories: [initialPath] }); + return await triggerUpdateClaudeMd( + validatedPath, + typeof tool === 'string' ? tool : 'gemini', + typeof strategy === 'string' ? strategy : 'single-layer' + ); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Files] Path validation failed: ${message}`); + return { error: status === 403 ? 
'Access denied' : 'Invalid path', status }; + } }); return true; } diff --git a/ccw/src/core/routes/graph-routes.ts b/ccw/src/core/routes/graph-routes.ts index 74efff04..6c070721 100644 --- a/ccw/src/core/routes/graph-routes.ts +++ b/ccw/src/core/routes/graph-routes.ts @@ -2,21 +2,12 @@ * Graph Routes Module * Handles graph visualization API endpoints for codex-lens data */ -import type { IncomingMessage, ServerResponse } from 'http'; import { homedir } from 'os'; import { join, resolve, normalize } from 'path'; import { existsSync, readdirSync } from 'fs'; import Database from 'better-sqlite3'; - -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; -} +import { validatePath as validateAllowedPath } from '../../utils/path-validator.js'; +import type { RouteContext } from './types.js'; /** * PathMapper utility class (simplified from codex-lens Python implementation) @@ -82,28 +73,22 @@ interface ImpactAnalysis { * Validate and sanitize project path to prevent path traversal attacks * @returns sanitized absolute path or null if invalid */ -function validateProjectPath(projectPath: string, initialPath: string): string | null { - if (!projectPath) { - return initialPath; +type ProjectPathValidationResult = + | { path: string; status: 200 } + | { path: null; status: number; error: string }; + +async function validateProjectPath(projectPath: string, initialPath: string): Promise { + const candidate = projectPath || initialPath; + + try { + const validated = await validateAllowedPath(candidate, { mustExist: true, allowedDirectories: [initialPath] }); + return { path: validated, status: 200 }; + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 
403 : 400; + console.error(`[Graph] Project path validation failed: ${message}`); + return { path: null, status, error: status === 403 ? 'Access denied' : 'Invalid project path' }; } - - // Resolve to absolute path - const resolved = resolve(projectPath); - const normalized = normalize(resolved); - - // Check for path traversal attempts - if (normalized.includes('..') || normalized !== resolved) { - console.error(`[Graph] Path traversal attempt blocked: ${projectPath}`); - return null; - } - - // Ensure path exists and is a directory - if (!existsSync(normalized)) { - console.error(`[Graph] Path does not exist: ${normalized}`); - return null; - } - - return normalized; } /** @@ -440,18 +425,20 @@ export async function handleGraphRoutes(ctx: RouteContext): Promise { // API: Graph Nodes - Get all symbols as graph nodes if (pathname === '/api/graph/nodes') { const rawPath = url.searchParams.get('path') || initialPath; - const projectPath = validateProjectPath(rawPath, initialPath); + const projectPathResult = await validateProjectPath(rawPath, initialPath); const limitStr = url.searchParams.get('limit') || '1000'; const limit = Math.min(parseInt(limitStr, 10) || 1000, 5000); // Max 5000 nodes const fileFilter = url.searchParams.get('file') || undefined; const moduleFilter = url.searchParams.get('module') || undefined; - if (!projectPath) { - res.writeHead(400, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ error: 'Invalid project path', nodes: [] })); + if (projectPathResult.path === null) { + res.writeHead(projectPathResult.status, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: projectPathResult.error, nodes: [] })); return true; } + const projectPath = projectPathResult.path; + try { const allNodes = await querySymbols(projectPath, fileFilter, moduleFilter); const nodes = allNodes.slice(0, limit); @@ -474,18 +461,20 @@ export async function handleGraphRoutes(ctx: RouteContext): Promise { // API: Graph Edges - Get all 
relationships as graph edges if (pathname === '/api/graph/edges') { const rawPath = url.searchParams.get('path') || initialPath; - const projectPath = validateProjectPath(rawPath, initialPath); + const projectPathResult = await validateProjectPath(rawPath, initialPath); const limitStr = url.searchParams.get('limit') || '2000'; const limit = Math.min(parseInt(limitStr, 10) || 2000, 10000); // Max 10000 edges const fileFilter = url.searchParams.get('file') || undefined; const moduleFilter = url.searchParams.get('module') || undefined; - if (!projectPath) { - res.writeHead(400, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ error: 'Invalid project path', edges: [] })); + if (projectPathResult.path === null) { + res.writeHead(projectPathResult.status, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: projectPathResult.error, edges: [] })); return true; } + const projectPath = projectPathResult.path; + try { const allEdges = await queryRelationships(projectPath, fileFilter, moduleFilter); const edges = allEdges.slice(0, limit); @@ -508,14 +497,16 @@ export async function handleGraphRoutes(ctx: RouteContext): Promise { // API: Get available files and modules for filtering if (pathname === '/api/graph/files') { const rawPath = url.searchParams.get('path') || initialPath; - const projectPath = validateProjectPath(rawPath, initialPath); + const projectPathResult = await validateProjectPath(rawPath, initialPath); - if (!projectPath) { - res.writeHead(400, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ error: 'Invalid project path', files: [], modules: [] })); + if (projectPathResult.path === null) { + res.writeHead(projectPathResult.status, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: projectPathResult.error, files: [], modules: [] })); return true; } + const projectPath = projectPathResult.path; + try { const mapper = new PathMapper(); const rootDbPath = 
mapper.sourceToIndexDb(projectPath); @@ -570,15 +561,17 @@ export async function handleGraphRoutes(ctx: RouteContext): Promise { // API: Impact Analysis - Get impact analysis for a symbol if (pathname === '/api/graph/impact') { const rawPath = url.searchParams.get('path') || initialPath; - const projectPath = validateProjectPath(rawPath, initialPath); + const projectPathResult = await validateProjectPath(rawPath, initialPath); const symbolId = url.searchParams.get('symbol'); - if (!projectPath) { - res.writeHead(400, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ error: 'Invalid project path', directDependents: [], affectedFiles: [] })); + if (projectPathResult.path === null) { + res.writeHead(projectPathResult.status, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: projectPathResult.error, directDependents: [], affectedFiles: [] })); return true; } + const projectPath = projectPathResult.path; + if (!symbolId) { res.writeHead(400, { 'Content-Type': 'application/json' }); res.end(JSON.stringify({ error: 'symbol parameter is required', directDependents: [], affectedFiles: [] })); diff --git a/ccw/src/core/routes/help-routes.ts b/ccw/src/core/routes/help-routes.ts index 6a3e97b3..bbea2398 100644 --- a/ccw/src/core/routes/help-routes.ts +++ b/ccw/src/core/routes/help-routes.ts @@ -1,22 +1,11 @@ -// @ts-nocheck /** * Help Routes Module * Handles all Help-related API endpoints for command guide and CodexLens docs */ -import type { IncomingMessage, ServerResponse } from 'http'; import { readFileSync, existsSync, watch } from 'fs'; import { join } from 'path'; import { homedir } from 'os'; - -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; -} +import type { RouteContext } from 
'./types.js'; // ========== In-Memory Cache ========== interface CacheEntry { @@ -101,6 +90,7 @@ function initializeFileWatchers(): void { }); watchersInitialized = true; + (watcher as any).unref?.(); console.log(`File watchers initialized for: ${indexDir}`); } catch (error) { console.error('Failed to initialize file watchers:', error); diff --git a/ccw/src/core/routes/hooks-routes.ts b/ccw/src/core/routes/hooks-routes.ts index 7d85647e..30ca13e3 100644 --- a/ccw/src/core/routes/hooks-routes.ts +++ b/ccw/src/core/routes/hooks-routes.ts @@ -1,21 +1,14 @@ -// @ts-nocheck /** * Hooks Routes Module * Handles all hooks-related API endpoints */ -import type { IncomingMessage, ServerResponse } from 'http'; import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs'; import { join, dirname } from 'path'; import { homedir } from 'os'; -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; +import type { RouteContext } from './types.js'; + +interface HooksRouteContext extends RouteContext { extractSessionIdFromPath: (filePath: string) => string | null; } @@ -30,7 +23,7 @@ const GLOBAL_SETTINGS_PATH = join(homedir(), '.claude', 'settings.json'); * @param {string} projectPath * @returns {string} */ -function getProjectSettingsPath(projectPath) { +function getProjectSettingsPath(projectPath: string): string { // path.join automatically handles cross-platform path separators return join(projectPath, '.claude', 'settings.json'); } @@ -40,7 +33,7 @@ function getProjectSettingsPath(projectPath) { * @param {string} filePath * @returns {Object} */ -function readSettingsFile(filePath) { +function readSettingsFile(filePath: string): Record { try { if (!existsSync(filePath)) { return {}; @@ -58,7 +51,7 @@ function readSettingsFile(filePath) 
{ * @param {string} projectPath * @returns {Object} */ -function getHooksConfig(projectPath) { +function getHooksConfig(projectPath: string): { global: { path: string; hooks: unknown }; project: { path: string | null; hooks: unknown } } { const globalSettings = readSettingsFile(GLOBAL_SETTINGS_PATH); const projectSettingsPath = projectPath ? getProjectSettingsPath(projectPath) : null; const projectSettings = projectSettingsPath ? readSettingsFile(projectSettingsPath) : {}; @@ -66,11 +59,11 @@ function getHooksConfig(projectPath) { return { global: { path: GLOBAL_SETTINGS_PATH, - hooks: globalSettings.hooks || {} + hooks: (globalSettings as { hooks?: unknown }).hooks || {} }, project: { path: projectSettingsPath, - hooks: projectSettings.hooks || {} + hooks: (projectSettings as { hooks?: unknown }).hooks || {} } }; } @@ -83,15 +76,18 @@ function getHooksConfig(projectPath) { * @param {Object} hookData - Hook configuration * @returns {Object} */ -function saveHookToSettings(projectPath, scope, event, hookData) { +function saveHookToSettings( + projectPath: string, + scope: 'global' | 'project', + event: string, + hookData: Record & { replaceIndex?: unknown } +): Record { try { const filePath = scope === 'global' ? 
GLOBAL_SETTINGS_PATH : getProjectSettingsPath(projectPath); - const settings = readSettingsFile(filePath); + const settings = readSettingsFile(filePath) as Record & { hooks?: Record }; // Ensure hooks object exists - if (!settings.hooks) { - settings.hooks = {}; - } + settings.hooks = settings.hooks || {}; // Ensure the event array exists if (!settings.hooks[event]) { @@ -104,15 +100,16 @@ function saveHookToSettings(projectPath, scope, event, hookData) { } // Check if we're replacing an existing hook - if (hookData.replaceIndex !== undefined) { + if (typeof hookData.replaceIndex === 'number') { const index = hookData.replaceIndex; delete hookData.replaceIndex; - if (index >= 0 && index < settings.hooks[event].length) { - settings.hooks[event][index] = hookData; + const hooksForEvent = settings.hooks[event] as unknown[]; + if (index >= 0 && index < hooksForEvent.length) { + hooksForEvent[index] = hookData; } } else { // Add new hook - settings.hooks[event].push(hookData); + (settings.hooks[event] as unknown[]).push(hookData); } // Ensure directory exists and write file @@ -141,10 +138,15 @@ function saveHookToSettings(projectPath, scope, event, hookData) { * @param {number} hookIndex - Index of hook to delete * @returns {Object} */ -function deleteHookFromSettings(projectPath, scope, event, hookIndex) { +function deleteHookFromSettings( + projectPath: string, + scope: 'global' | 'project', + event: string, + hookIndex: number +): Record { try { const filePath = scope === 'global' ? 
GLOBAL_SETTINGS_PATH : getProjectSettingsPath(projectPath); - const settings = readSettingsFile(filePath); + const settings = readSettingsFile(filePath) as Record & { hooks?: Record }; if (!settings.hooks || !settings.hooks[event]) { return { error: 'Hook not found' }; @@ -155,15 +157,17 @@ function deleteHookFromSettings(projectPath, scope, event, hookIndex) { settings.hooks[event] = [settings.hooks[event]]; } - if (hookIndex < 0 || hookIndex >= settings.hooks[event].length) { + const hooksForEvent = settings.hooks[event] as unknown[]; + + if (hookIndex < 0 || hookIndex >= hooksForEvent.length) { return { error: 'Invalid hook index' }; } // Remove the hook - settings.hooks[event].splice(hookIndex, 1); + hooksForEvent.splice(hookIndex, 1); // Remove empty event arrays - if (settings.hooks[event].length === 0) { + if (hooksForEvent.length === 0) { delete settings.hooks[event]; } @@ -197,18 +201,29 @@ function deleteHookFromSettings(projectPath, scope, event, hookIndex) { * Handle hooks routes * @returns true if route was handled, false otherwise */ -export async function handleHooksRoutes(ctx: RouteContext): Promise { +export async function handleHooksRoutes(ctx: HooksRouteContext): Promise { const { pathname, url, req, res, initialPath, handlePostRequest, broadcastToClients, extractSessionIdFromPath } = ctx; // API: Hook endpoint for Claude Code notifications if (pathname === '/api/hook' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { type, filePath, sessionId, ...extraData } = body; + if (typeof body !== 'object' || body === null) { + return { error: 'Invalid request body', status: 400 }; + } + + const payload = body as Record; + const type = payload.type; + const filePath = payload.filePath; + const sessionId = payload.sessionId; + const extraData: Record = { ...payload }; + delete extraData.type; + delete extraData.filePath; + delete extraData.sessionId; // Determine session ID from file path if not provided - let 
resolvedSessionId = sessionId; - if (!resolvedSessionId && filePath) { - resolvedSessionId = extractSessionIdFromPath(filePath); + let resolvedSessionId = typeof sessionId === 'string' ? sessionId : undefined; + if (!resolvedSessionId && typeof filePath === 'string') { + resolvedSessionId = extractSessionIdFromPath(filePath) ?? undefined; } // Handle context hooks (session-start, context) @@ -226,7 +241,7 @@ export async function handleHooksRoutes(ctx: RouteContext): Promise { const index = await clusteringService.getProgressiveIndex({ type: type as 'session-start' | 'context', sessionId: resolvedSessionId, - prompt: extraData.prompt // Pass user prompt for intent matching + prompt: typeof extraData.prompt === 'string' ? extraData.prompt : undefined // Pass user prompt for intent matching }); // Return context directly @@ -253,10 +268,10 @@ export async function handleHooksRoutes(ctx: RouteContext): Promise { // Broadcast to all connected WebSocket clients const notification = { - type: type || 'session_updated', + type: typeof type === 'string' && type.trim().length > 0 ? type : 'session_updated', payload: { sessionId: resolvedSessionId, - filePath: filePath, + filePath: typeof filePath === 'string' ? filePath : undefined, timestamp: new Date().toISOString(), ...extraData // Pass through toolName, status, result, params, error, etc. 
} @@ -365,7 +380,7 @@ export async function handleHooksRoutes(ctx: RouteContext): Promise { // API: Get hooks configuration if (pathname === '/api/hooks' && req.method === 'GET') { const projectPathParam = url.searchParams.get('path'); - const hooksData = getHooksConfig(projectPathParam); + const hooksData = getHooksConfig(projectPathParam || initialPath); res.writeHead(200, { 'Content-Type': 'application/json' }); res.end(JSON.stringify(hooksData)); return true; @@ -374,11 +389,23 @@ export async function handleHooksRoutes(ctx: RouteContext): Promise { // API: Save hook if (pathname === '/api/hooks' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { projectPath, scope, event, hookData } = body; - if (!scope || !event || !hookData) { + if (typeof body !== 'object' || body === null) { + return { error: 'Invalid request body', status: 400 }; + } + + const { projectPath, scope, event, hookData } = body as { + projectPath?: unknown; + scope?: unknown; + event?: unknown; + hookData?: unknown; + }; + + if ((scope !== 'global' && scope !== 'project') || typeof event !== 'string' || typeof hookData !== 'object' || hookData === null) { return { error: 'scope, event, and hookData are required', status: 400 }; } - return saveHookToSettings(projectPath, scope, event, hookData); + + const resolvedProjectPath = typeof projectPath === 'string' && projectPath.trim().length > 0 ? 
projectPath : initialPath; + return saveHookToSettings(resolvedProjectPath, scope, event, hookData as Record); }); return true; } @@ -386,11 +413,23 @@ export async function handleHooksRoutes(ctx: RouteContext): Promise { // API: Delete hook if (pathname === '/api/hooks' && req.method === 'DELETE') { handlePostRequest(req, res, async (body) => { - const { projectPath, scope, event, hookIndex } = body; - if (!scope || !event || hookIndex === undefined) { + if (typeof body !== 'object' || body === null) { + return { error: 'Invalid request body', status: 400 }; + } + + const { projectPath, scope, event, hookIndex } = body as { + projectPath?: unknown; + scope?: unknown; + event?: unknown; + hookIndex?: unknown; + }; + + if ((scope !== 'global' && scope !== 'project') || typeof event !== 'string' || typeof hookIndex !== 'number') { return { error: 'scope, event, and hookIndex are required', status: 400 }; } - return deleteHookFromSettings(projectPath, scope, event, hookIndex); + + const resolvedProjectPath = typeof projectPath === 'string' && projectPath.trim().length > 0 ? 
projectPath : initialPath; + return deleteHookFromSettings(resolvedProjectPath, scope, event, hookIndex); }); return true; } diff --git a/ccw/src/core/routes/issue-routes.ts b/ccw/src/core/routes/issue-routes.ts index b8c1304a..e8590972 100644 --- a/ccw/src/core/routes/issue-routes.ts +++ b/ccw/src/core/routes/issue-routes.ts @@ -1,4 +1,3 @@ -// @ts-nocheck /** * Issue Routes Module (Optimized - Flat JSONL Storage) * @@ -23,19 +22,9 @@ * - GET /api/queue - Get execution queue * - POST /api/queue/reorder - Reorder queue items */ -import type { IncomingMessage, ServerResponse } from 'http'; import { readFileSync, existsSync, writeFileSync, mkdirSync, unlinkSync } from 'fs'; import { join } from 'path'; - -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; -} +import type { RouteContext } from './types.js'; // ========== JSONL Helper Functions ========== diff --git a/ccw/src/core/routes/litellm-api-routes.ts b/ccw/src/core/routes/litellm-api-routes.ts index 791f82be..6624cf94 100644 --- a/ccw/src/core/routes/litellm-api-routes.ts +++ b/ccw/src/core/routes/litellm-api-routes.ts @@ -1,12 +1,11 @@ -// @ts-nocheck /** * LiteLLM API Routes Module * Handles LiteLLM provider management, endpoint configuration, and cache management */ -import type { IncomingMessage, ServerResponse } from 'http'; import { fileURLToPath } from 'url'; import { dirname, join as pathJoin } from 'path'; import { getSystemPython } from '../../utils/python-utils.js'; +import type { RouteContext } from './types.js'; // Get current module path for package-relative lookups const __filename = fileURLToPath(import.meta.url); @@ -66,14 +65,20 @@ export function clearCcwLitellmStatusCache() { ccwLitellmStatusCache.timestamp = 0; } -export interface RouteContext { - 
pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; +function sanitizeProviderForResponse(provider: any): any { + if (!provider) return provider; + return { + ...provider, + apiKey: '***', + apiKeys: Array.isArray(provider.apiKeys) + ? provider.apiKeys.map((entry: any) => ({ ...entry, key: '***' })) + : provider.apiKeys, + }; +} + +function sanitizeRotationEndpointForResponse(endpoint: any): any { + if (!endpoint) return endpoint; + return { ...endpoint, api_key: '***' }; } // =========================== @@ -83,11 +88,11 @@ export interface RouteContext { interface ModelInfo { id: string; name: string; - provider: ProviderType; + provider: string; description?: string; } -const PROVIDER_MODELS: Record = { +const PROVIDER_MODELS: Record = { openai: [ { id: 'gpt-4-turbo', name: 'GPT-4 Turbo', provider: 'openai', description: '128K context' }, { id: 'gpt-4', name: 'GPT-4', provider: 'openai', description: '8K context' }, @@ -132,7 +137,7 @@ export async function handleLiteLLMApiRoutes(ctx: RouteContext): Promise Promise) => void; - broadcastToClients: (data: unknown) => void; +function isChatMessage(value: unknown): value is ChatMessage { + if (typeof value !== 'object' || value === null) return false; + const candidate = value as Record; + const role = candidate.role; + const content = candidate.content; + if (role !== 'system' && role !== 'user' && role !== 'assistant') return false; + return typeof content === 'string'; } /** @@ -29,9 +28,9 @@ export async function handleLiteLLMRoutes(ctx: RouteContext): Promise { const status = await getLiteLLMStatus(); res.writeHead(200, { 'Content-Type': 'application/json' }); res.end(JSON.stringify(status)); - } catch (err) { + } catch (err: unknown) { res.writeHead(500, { 'Content-Type': 'application/json' }); - 
res.end(JSON.stringify({ available: false, error: err.message })); + res.end(JSON.stringify({ available: false, error: err instanceof Error ? err.message : String(err) })); } return true; } @@ -43,9 +42,9 @@ export async function handleLiteLLMRoutes(ctx: RouteContext): Promise { const config = await client.getConfig(); res.writeHead(200, { 'Content-Type': 'application/json' }); res.end(JSON.stringify(config)); - } catch (err) { + } catch (err: unknown) { res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ error: err.message })); + res.end(JSON.stringify({ error: err instanceof Error ? err.message : String(err) })); } return true; } @@ -53,9 +52,13 @@ export async function handleLiteLLMRoutes(ctx: RouteContext): Promise { // API: LiteLLM Embed - Generate embeddings if (pathname === '/api/litellm/embed' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { texts, model = 'default' } = body; + if (typeof body !== 'object' || body === null) { + return { error: 'Invalid request body', status: 400 }; + } - if (!texts || !Array.isArray(texts)) { + const { texts, model = 'default' } = body as { texts?: unknown; model?: unknown }; + + if (!Array.isArray(texts) || texts.some((t) => typeof t !== 'string')) { return { error: 'texts array is required', status: 400 }; } @@ -65,10 +68,10 @@ export async function handleLiteLLMRoutes(ctx: RouteContext): Promise { try { const client = getLiteLLMClient(); - const result = await client.embed(texts, model); + const result = await client.embed(texts, typeof model === 'string' ? model : 'default'); return { success: true, ...result }; - } catch (err) { - return { error: err.message, status: 500 }; + } catch (err: unknown) { + return { error: err instanceof Error ? 
err.message : String(err), status: 500 }; } }); return true; @@ -77,27 +80,32 @@ export async function handleLiteLLMRoutes(ctx: RouteContext): Promise { // API: LiteLLM Chat - Chat with LLM if (pathname === '/api/litellm/chat' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { message, messages, model = 'default' } = body; + if (typeof body !== 'object' || body === null) { + return { error: 'Invalid request body', status: 400 }; + } + + const { message, messages, model = 'default' } = body as { message?: unknown; messages?: unknown; model?: unknown }; // Support both single message and messages array - if (!message && (!messages || !Array.isArray(messages))) { + if (typeof message !== 'string' && (!Array.isArray(messages) || !messages.every(isChatMessage))) { return { error: 'message or messages array is required', status: 400 }; } try { const client = getLiteLLMClient(); - if (messages && Array.isArray(messages)) { + if (Array.isArray(messages) && messages.every(isChatMessage)) { // Multi-turn chat - const result = await client.chatMessages(messages, model); + const result = await client.chatMessages(messages, typeof model === 'string' ? model : 'default'); return { success: true, ...result }; } else { // Single message chat - const content = await client.chat(message, model); - return { success: true, content, model }; + const resolvedModel = typeof model === 'string' ? model : 'default'; + const content = await client.chat(message as string, resolvedModel); + return { success: true, content, model: resolvedModel }; } - } catch (err) { - return { error: err.message, status: 500 }; + } catch (err: unknown) { + return { error: err instanceof Error ? 
err.message : String(err), status: 500 }; } }); return true; diff --git a/ccw/src/core/routes/mcp-routes.ts b/ccw/src/core/routes/mcp-routes.ts index b9164ec1..bbb553ba 100644 --- a/ccw/src/core/routes/mcp-routes.ts +++ b/ccw/src/core/routes/mcp-routes.ts @@ -1,13 +1,12 @@ -// @ts-nocheck /** * MCP Routes Module * Handles all MCP-related API endpoints */ -import type { IncomingMessage, ServerResponse } from 'http'; import { readFileSync, writeFileSync, existsSync, mkdirSync, readdirSync, statSync } from 'fs'; import { join, dirname } from 'path'; import { homedir } from 'os'; import * as McpTemplatesDb from './mcp-templates-db.js'; +import type { RouteContext } from './types.js'; // Claude config file path const CLAUDE_CONFIG_PATH = join(homedir(), '.claude.json'); @@ -432,16 +431,6 @@ function toggleCodexMcpServer(serverName: string, enabled: boolean): { success?: } } -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; -} - // ======================================== // Helper Functions // ======================================== @@ -464,7 +453,7 @@ function getEnterpriseMcpPath(): string { /** * Safely read and parse JSON file */ -function safeReadJson(filePath) { +function safeReadJson(filePath: string): any | null { try { if (!existsSync(filePath)) return null; const content = readFileSync(filePath, 'utf8'); @@ -479,8 +468,8 @@ function safeReadJson(filePath) { * @param {string} filePath * @returns {Object} mcpServers object or empty object */ -function getMcpServersFromFile(filePath) { - const config = safeReadJson(filePath); +function getMcpServersFromFile(filePath: string): Record { + const config = safeReadJson(filePath) as { mcpServers?: Record } | null; if (!config) return {}; return config.mcpServers || {}; } @@ 
-492,7 +481,7 @@ function getMcpServersFromFile(filePath) { * @param {Object} serverConfig - MCP server configuration * @returns {Object} Result with success/error */ -function addMcpServerToMcpJson(projectPath, serverName, serverConfig) { +function addMcpServerToMcpJson(projectPath: string, serverName: string, serverConfig: unknown) { try { const normalizedPath = normalizePathForFileSystem(projectPath); const mcpJsonPath = join(normalizedPath, '.mcp.json'); @@ -530,7 +519,7 @@ function addMcpServerToMcpJson(projectPath, serverName, serverConfig) { * @param {string} serverName - MCP server name * @returns {Object} Result with success/error */ -function removeMcpServerFromMcpJson(projectPath, serverName) { +function removeMcpServerFromMcpJson(projectPath: string, serverName: string) { try { const normalizedPath = normalizePathForFileSystem(projectPath); const mcpJsonPath = join(normalizedPath, '.mcp.json'); @@ -562,6 +551,26 @@ function removeMcpServerFromMcpJson(projectPath, serverName) { } } +type McpServerConfig = Record; +type McpServers = Record; +type ProjectConfig = { + mcpServers?: McpServers; + mcpJsonPath?: string; + hasMcpJson?: boolean; + [key: string]: unknown; +}; +type ProjectsConfig = Record; +type ConfigSource = { type: string; path: string; count: number }; + +interface McpConfig { + projects: ProjectsConfig; + userServers: McpServers; + enterpriseServers: McpServers; + globalServers: McpServers; + configSources: ConfigSource[]; + error?: string; +} + /** * Get MCP configuration from multiple sources (per official Claude Code docs): * @@ -575,12 +584,13 @@ function removeMcpServerFromMcpJson(projectPath, serverName) { * * @returns {Object} */ -function getMcpConfig() { +function getMcpConfig(): McpConfig { try { - const result = { + const result: McpConfig = { projects: {}, userServers: {}, // User-level servers from ~/.claude.json mcpServers enterpriseServers: {}, // Enterprise managed servers (highest priority) + globalServers: {}, // Merged user 
+ enterprise configSources: [] // Track where configs came from for debugging }; @@ -650,7 +660,14 @@ function getMcpConfig() { return result; } catch (error: unknown) { console.error('Error reading MCP config:', error); - return { projects: {}, globalServers: {}, userServers: {}, enterpriseServers: {}, configSources: [], error: (error as Error).message }; + return { + projects: {}, + globalServers: {}, + userServers: {}, + enterpriseServers: {}, + configSources: [], + error: error instanceof Error ? error.message : String(error), + }; } } @@ -660,7 +677,7 @@ function getMcpConfig() { * @param {string} path * @returns {string} */ -function normalizePathForFileSystem(path) { +function normalizePathForFileSystem(path: string): string { let normalized = path.replace(/\\/g, '/'); // Handle /d/path format -> D:/path @@ -678,7 +695,7 @@ function normalizePathForFileSystem(path) { * @param {Object} claudeConfig - Optional existing config to check format * @returns {string} */ -function normalizeProjectPathForConfig(path, claudeConfig = null) { +function normalizeProjectPathForConfig(path: string, claudeConfig: unknown = null): string { // IMPORTANT: Always normalize to forward slashes to prevent duplicate entries // (e.g., prevents both "D:/Claude_dms3" and "D:\\Claude_dms3") let normalizedForward = path.replace(/\\/g, '/'); @@ -699,7 +716,7 @@ function normalizeProjectPathForConfig(path, claudeConfig = null) { * @param {boolean} enable * @returns {Object} */ -function toggleMcpServerEnabled(projectPath, serverName, enable) { +function toggleMcpServerEnabled(projectPath: string, serverName: string, enable: boolean) { try { if (!existsSync(CLAUDE_CONFIG_PATH)) { return { error: '.claude.json not found' }; @@ -723,7 +740,7 @@ function toggleMcpServerEnabled(projectPath, serverName, enable) { if (enable) { // Remove from disabled list - projectConfig.disabledMcpServers = projectConfig.disabledMcpServers.filter(s => s !== serverName); + projectConfig.disabledMcpServers = 
projectConfig.disabledMcpServers.filter((s: string) => s !== serverName); } else { // Add to disabled list if not already there if (!projectConfig.disabledMcpServers.includes(serverName)) { @@ -755,7 +772,7 @@ function toggleMcpServerEnabled(projectPath, serverName, enable) { * @param {boolean} useLegacyConfig - If true, use .claude.json instead of .mcp.json * @returns {Object} */ -function addMcpServerToProject(projectPath, serverName, serverConfig, useLegacyConfig = false) { +function addMcpServerToProject(projectPath: string, serverName: string, serverConfig: unknown, useLegacyConfig: boolean = false) { try { // Default: Use .mcp.json for project-level MCP servers if (!useLegacyConfig) { @@ -823,7 +840,7 @@ function addMcpServerToProject(projectPath, serverName, serverConfig, useLegacyC * @param {string} serverName * @returns {Object} */ -function removeMcpServerFromProject(projectPath, serverName) { +function removeMcpServerFromProject(projectPath: string, serverName: string) { try { const normalizedPathForFile = normalizePathForFileSystem(projectPath); const mcpJsonPath = join(normalizedPathForFile, '.mcp.json'); @@ -859,7 +876,7 @@ function removeMcpServerFromProject(projectPath, serverName) { // Also remove from disabled list if present if (projectConfig.disabledMcpServers) { - projectConfig.disabledMcpServers = projectConfig.disabledMcpServers.filter(s => s !== serverName); + projectConfig.disabledMcpServers = projectConfig.disabledMcpServers.filter((s: string) => s !== serverName); } // Write back to file @@ -894,7 +911,7 @@ function removeMcpServerFromProject(projectPath, serverName) { * @param {Object} serverConfig * @returns {Object} */ -function addGlobalMcpServer(serverName, serverConfig) { +function addGlobalMcpServer(serverName: string, serverConfig: unknown) { try { if (!existsSync(CLAUDE_CONFIG_PATH)) { return { error: '.claude.json not found' }; @@ -931,7 +948,7 @@ function addGlobalMcpServer(serverName, serverConfig) { * @param {string} 
serverName * @returns {Object} */ -function removeGlobalMcpServer(serverName) { +function removeGlobalMcpServer(serverName: string) { try { if (!existsSync(CLAUDE_CONFIG_PATH)) { return { error: '.claude.json not found' }; @@ -967,7 +984,7 @@ function removeGlobalMcpServer(serverName) { * @param {string} filePath * @returns {Object} */ -function readSettingsFile(filePath) { +function readSettingsFile(filePath: string) { try { if (!existsSync(filePath)) { return {}; @@ -985,7 +1002,7 @@ function readSettingsFile(filePath) { * @param {string} filePath * @param {Object} settings */ -function writeSettingsFile(filePath, settings) { +function writeSettingsFile(filePath: string, settings: any) { const dirPath = dirname(filePath); // Ensure directory exists if (!existsSync(dirPath)) { @@ -999,7 +1016,7 @@ function writeSettingsFile(filePath, settings) { * @param {string} projectPath * @returns {string} */ -function getProjectSettingsPath(projectPath) { +function getProjectSettingsPath(projectPath: string): string { // path.join automatically handles cross-platform path separators return join(projectPath, '.claude', 'settings.json'); } @@ -1008,6 +1025,10 @@ function getProjectSettingsPath(projectPath) { // Route Handlers // ======================================== +function isRecord(value: unknown): value is Record { + return typeof value === 'object' && value !== null; +} + /** * Handle MCP routes * @returns true if route was handled, false otherwise @@ -1043,11 +1064,22 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise { // API: Add Codex MCP server if (pathname === '/api/codex-mcp-add' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { serverName, serverConfig } = body; - if (!serverName || !serverConfig) { - return { error: 'serverName and serverConfig are required', status: 400 }; + if (!isRecord(body)) { + return { error: 'Invalid request body', status: 400 }; } - return addCodexMcpServer(serverName, serverConfig); 
+ + const serverName = body.serverName; + const serverConfig = body.serverConfig; + + if (typeof serverName !== 'string' || !serverName.trim()) { + return { error: 'serverName is required', status: 400 }; + } + + if (!isRecord(serverConfig)) { + return { error: 'serverConfig is required', status: 400 }; + } + + return addCodexMcpServer(serverName, serverConfig as Record); }); return true; } @@ -1055,8 +1087,12 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise { // API: Remove Codex MCP server if (pathname === '/api/codex-mcp-remove' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { serverName } = body; - if (!serverName) { + if (!isRecord(body)) { + return { error: 'Invalid request body', status: 400 }; + } + + const serverName = body.serverName; + if (typeof serverName !== 'string' || !serverName.trim()) { return { error: 'serverName is required', status: 400 }; } return removeCodexMcpServer(serverName); @@ -1067,8 +1103,14 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise { // API: Toggle Codex MCP server enabled state if (pathname === '/api/codex-mcp-toggle' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { serverName, enabled } = body; - if (!serverName || enabled === undefined) { + if (!isRecord(body)) { + return { error: 'Invalid request body', status: 400 }; + } + + const serverName = body.serverName; + const enabled = body.enabled; + + if (typeof serverName !== 'string' || !serverName.trim() || typeof enabled !== 'boolean') { return { error: 'serverName and enabled are required', status: 400 }; } return toggleCodexMcpServer(serverName, enabled); @@ -1079,9 +1121,16 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise { // API: Toggle MCP server enabled/disabled if (pathname === '/api/mcp-toggle' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { projectPath, serverName, enable } = body; - if (!projectPath 
|| !serverName) { - return { error: 'projectPath and serverName are required', status: 400 }; + if (!isRecord(body)) { + return { error: 'Invalid request body', status: 400 }; + } + + const projectPath = body.projectPath; + const serverName = body.serverName; + const enable = body.enable; + + if (typeof projectPath !== 'string' || !projectPath.trim() || typeof serverName !== 'string' || !serverName.trim() || typeof enable !== 'boolean') { + return { error: 'projectPath, serverName, and enable are required', status: 400 }; } return toggleMcpServerEnabled(projectPath, serverName, enable); }); @@ -1091,8 +1140,16 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise { // API: Copy MCP server to project if (pathname === '/api/mcp-copy-server' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { projectPath, serverName, serverConfig, configType } = body; - if (!projectPath || !serverName || !serverConfig) { + if (!isRecord(body)) { + return { error: 'Invalid request body', status: 400 }; + } + + const projectPath = body.projectPath; + const serverName = body.serverName; + const serverConfig = body.serverConfig; + const configType = body.configType; + + if (typeof projectPath !== 'string' || !projectPath.trim() || typeof serverName !== 'string' || !serverName.trim() || serverConfig === undefined || serverConfig === null) { return { error: 'projectPath, serverName, and serverConfig are required', status: 400 }; } // configType: 'mcp' = use .mcp.json (default), 'claude' = use .claude.json @@ -1105,8 +1162,12 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise { // API: Install CCW MCP server to project if (pathname === '/api/mcp-install-ccw' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { projectPath } = body; - if (!projectPath) { + if (!isRecord(body)) { + return { error: 'Invalid request body', status: 400 }; + } + + const projectPath = body.projectPath; + if (typeof 
projectPath !== 'string' || !projectPath.trim()) { return { error: 'projectPath is required', status: 400 }; } @@ -1129,8 +1190,13 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise { // API: Remove MCP server from project if (pathname === '/api/mcp-remove-server' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { projectPath, serverName } = body; - if (!projectPath || !serverName) { + if (!isRecord(body)) { + return { error: 'Invalid request body', status: 400 }; + } + + const projectPath = body.projectPath; + const serverName = body.serverName; + if (typeof projectPath !== 'string' || !projectPath.trim() || typeof serverName !== 'string' || !serverName.trim()) { return { error: 'projectPath and serverName are required', status: 400 }; } return removeMcpServerFromProject(projectPath, serverName); @@ -1141,8 +1207,13 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise { // API: Add MCP server to global scope (top-level mcpServers in ~/.claude.json) if (pathname === '/api/mcp-add-global-server' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { serverName, serverConfig } = body; - if (!serverName || !serverConfig) { + if (!isRecord(body)) { + return { error: 'Invalid request body', status: 400 }; + } + + const serverName = body.serverName; + const serverConfig = body.serverConfig; + if (typeof serverName !== 'string' || !serverName.trim() || serverConfig === undefined || serverConfig === null) { return { error: 'serverName and serverConfig are required', status: 400 }; } return addGlobalMcpServer(serverName, serverConfig); @@ -1153,8 +1224,12 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise { // API: Remove MCP server from global scope if (pathname === '/api/mcp-remove-global-server' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { serverName } = body; - if (!serverName) { + if (!isRecord(body)) { + return { 
error: 'Invalid request body', status: 400 }; + } + + const serverName = body.serverName; + if (typeof serverName !== 'string' || !serverName.trim()) { return { error: 'serverName is required', status: 400 }; } return removeGlobalMcpServer(serverName); @@ -1177,14 +1252,29 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise { // API: Save MCP template if (pathname === '/api/mcp-templates' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { name, description, serverConfig, tags, category } = body; - if (!name || !serverConfig) { - return { error: 'name and serverConfig are required', status: 400 }; + if (!isRecord(body)) { + return { error: 'Invalid request body', status: 400 }; } + + const name = body.name; + const serverConfig = body.serverConfig; + + if (typeof name !== 'string' || !name.trim()) { + return { error: 'name is required', status: 400 }; + } + + if (!isRecord(serverConfig) || typeof serverConfig.command !== 'string') { + return { error: 'serverConfig with command is required', status: 400 }; + } + + const description = typeof body.description === 'string' ? body.description : undefined; + const tags = Array.isArray(body.tags) ? body.tags.filter((tag): tag is string => typeof tag === 'string') : undefined; + const category = typeof body.category === 'string' ? 
body.category : undefined; + return McpTemplatesDb.saveTemplate({ name, description, - serverConfig, + serverConfig: serverConfig as McpTemplatesDb.McpTemplate['serverConfig'], tags, category }); @@ -1244,8 +1334,15 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise { // API: Install template to project or global if (pathname === '/api/mcp-templates/install' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { templateName, projectPath, scope } = body; - if (!templateName) { + if (!isRecord(body)) { + return { error: 'Invalid request body', status: 400 }; + } + + const templateName = body.templateName; + const projectPath = body.projectPath; + const scope = body.scope; + + if (typeof templateName !== 'string' || !templateName.trim()) { return { error: 'templateName is required', status: 400 }; } @@ -1258,7 +1355,7 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise { if (scope === 'global') { return addGlobalMcpServer(templateName, template.serverConfig); } else { - if (!projectPath) { + if (typeof projectPath !== 'string' || !projectPath.trim()) { return { error: 'projectPath is required for project scope', status: 400 }; } return addMcpServerToProject(projectPath, templateName, template.serverConfig); diff --git a/ccw/src/core/routes/mcp-routes.ts.backup b/ccw/src/core/routes/mcp-routes.ts.backup index 2a53d079..efb88da0 100644 --- a/ccw/src/core/routes/mcp-routes.ts.backup +++ b/ccw/src/core/routes/mcp-routes.ts.backup @@ -1,4 +1,3 @@ -// @ts-nocheck /** * MCP Routes Module * Handles all MCP-related API endpoints diff --git a/ccw/src/core/routes/mcp-templates-db.ts b/ccw/src/core/routes/mcp-templates-db.ts index 704c8d13..046a75d7 100644 --- a/ccw/src/core/routes/mcp-templates-db.ts +++ b/ccw/src/core/routes/mcp-templates-db.ts @@ -1,4 +1,3 @@ -// @ts-nocheck /** * MCP Templates Database Module * Stores MCP server configurations as reusable templates diff --git 
a/ccw/src/core/routes/nav-status-routes.ts b/ccw/src/core/routes/nav-status-routes.ts index cb6bd3a7..77f14682 100644 --- a/ccw/src/core/routes/nav-status-routes.ts +++ b/ccw/src/core/routes/nav-status-routes.ts @@ -1,4 +1,3 @@ -// @ts-nocheck /** * Navigation Status Routes Module * Aggregated status endpoint for navigation bar badge updates @@ -6,18 +5,10 @@ * API Endpoints: * - GET /api/nav-status - Get aggregated navigation bar status (counts for all badges) */ -import type { IncomingMessage, ServerResponse } from 'http'; import { existsSync, readFileSync, readdirSync } from 'fs'; import { join } from 'path'; import { homedir } from 'os'; - -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; -} +import type { RouteContext } from './types.js'; // ========== Count Helper Functions ========== diff --git a/ccw/src/core/routes/rules-routes.ts b/ccw/src/core/routes/rules-routes.ts index a1ae928b..fb90e8e2 100644 --- a/ccw/src/core/routes/rules-routes.ts +++ b/ccw/src/core/routes/rules-routes.ts @@ -1,22 +1,51 @@ -// @ts-nocheck /** * Rules Routes Module * Handles all Rules-related API endpoints */ -import type { IncomingMessage, ServerResponse } from 'http'; import { readFileSync, existsSync, readdirSync, unlinkSync, promises as fsPromises } from 'fs'; import { join } from 'path'; import { homedir } from 'os'; import { executeCliTool } from '../../tools/cli-executor.js'; +import type { RouteContext } from './types.js'; -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; +interface ParsedRuleFrontmatter { + paths: string[]; + content: string; +} + +interface RuleDetail { + name: string; + paths: string[]; + content: string; + location: string; + path: 
string; + subdirectory: string | null; +} + +interface RuleConfigResult { + projectRules: RuleDetail[]; + userRules: RuleDetail[]; +} + +interface RuleCreateParams { + fileName: string; + content: string; + paths: string[]; + location: string; + subdirectory: string; + projectPath: string; +} + +interface RuleGenerateParams { + generationType: string; + description?: string; + templateType?: string; + extractScope?: string; + extractFocus?: string; + fileName: string; + location: string; + subdirectory: string; + projectPath: string; } /** @@ -24,8 +53,8 @@ export interface RouteContext { * @param {string} content * @returns {Object} */ -function parseRuleFrontmatter(content) { - const result = { +function parseRuleFrontmatter(content: string): ParsedRuleFrontmatter { + const result: ParsedRuleFrontmatter = { paths: [], content: content }; @@ -64,8 +93,8 @@ function parseRuleFrontmatter(content) { * @param {string} subdirectory * @returns {Object[]} */ -function scanRulesDirectory(dirPath, location, subdirectory) { - const rules = []; +function scanRulesDirectory(dirPath: string, location: string, subdirectory: string): RuleDetail[] { + const rules: RuleDetail[] = []; try { const entries = readdirSync(dirPath, { withFileTypes: true }); @@ -102,8 +131,8 @@ function scanRulesDirectory(dirPath, location, subdirectory) { * @param {string} projectPath * @returns {Object} */ -function getRulesConfig(projectPath) { - const result = { +function getRulesConfig(projectPath: string): RuleConfigResult { + const result: RuleConfigResult = { projectRules: [], userRules: [] }; @@ -135,7 +164,7 @@ function getRulesConfig(projectPath) { * @param {string} ruleName * @returns {string|null} */ -function findRuleFile(baseDir, ruleName) { +function findRuleFile(baseDir: string, ruleName: string): string | null { try { // Direct path const directPath = join(baseDir, ruleName); @@ -164,7 +193,7 @@ function findRuleFile(baseDir, ruleName) { * @param {string} projectPath * @returns {Object} 
*/ -function getRuleDetail(ruleName, location, projectPath) { +function getRuleDetail(ruleName: string, location: string, projectPath: string): { rule?: RuleDetail; error?: string } { try { const baseDir = location === 'project' ? join(projectPath, '.claude', 'rules') @@ -180,17 +209,26 @@ function getRuleDetail(ruleName, location, projectPath) { const content = readFileSync(rulePath, 'utf8'); const parsed = parseRuleFrontmatter(content); + const normalizedBaseDir = baseDir.replace(/\\/g, '/').replace(/\/+$/, ''); + const normalizedRulePath = rulePath.replace(/\\/g, '/'); + const relativePath = normalizedRulePath.startsWith(`${normalizedBaseDir}/`) + ? normalizedRulePath.slice(normalizedBaseDir.length + 1) + : ruleName; + const relativeParts = relativePath.split('/'); + const subdirectory = relativeParts.length > 1 ? relativeParts.slice(0, -1).join('/') : null; + return { rule: { name: ruleName, paths: parsed.paths, content: parsed.content, location, - path: rulePath + path: rulePath, + subdirectory } }; } catch (error) { - return { error: (error as Error).message }; + return { error: error instanceof Error ? error.message : String(error) }; } } @@ -201,7 +239,11 @@ function getRuleDetail(ruleName, location, projectPath) { * @param {string} projectPath * @returns {Object} */ -function deleteRule(ruleName, location, projectPath) { +function deleteRule( + ruleName: string, + location: string, + projectPath: string +): { success: true; ruleName: string; location: string } | { error: string; status?: number } { try { const baseDir = location === 'project' ? join(projectPath, '.claude', 'rules') @@ -217,7 +259,7 @@ function deleteRule(ruleName, location, projectPath) { return { success: true, ruleName, location }; } catch (error) { - return { error: (error as Error).message }; + return { error: error instanceof Error ? 
error.message : String(error) }; } } @@ -540,7 +582,7 @@ RULES: $(cat ~/.claude/workflows/cli-templates/prompts/universal/00-universal-ri * @param {boolean} params.enableReview - Optional: enable secondary review * @returns {Object} */ -async function generateRuleViaCLI(params) { +async function generateRuleViaCLI(params: RuleGenerateParams): Promise> { try { const { generationType, @@ -682,8 +724,8 @@ FILE NAME: ${fileName}`; executionId: result.conversation?.id, review: reviewResult }; - } catch (error) { - return { error: (error as Error).message }; + } catch (error: unknown) { + return { error: error instanceof Error ? error.message : String(error) }; } } @@ -698,7 +740,7 @@ FILE NAME: ${fileName}`; * @param {string} params.projectPath - Project root path * @returns {Object} */ -async function createRule(params) { +async function createRule(params: RuleCreateParams): Promise> { try { const { fileName, content, paths, location, subdirectory, projectPath } = params; @@ -749,8 +791,8 @@ paths: [${paths.join(', ')}] path: filePath, subdirectory: subdirectory || null }; - } catch (error) { - return { error: (error as Error).message }; + } catch (error: unknown) { + return { error: error instanceof Error ? error.message : String(error) }; } } @@ -790,8 +832,11 @@ export async function handleRulesRoutes(ctx: RouteContext): Promise { if (pathname.startsWith('/api/rules/') && req.method === 'DELETE') { const ruleName = decodeURIComponent(pathname.replace('/api/rules/', '')); handlePostRequest(req, res, async (body) => { - const { location, projectPath: projectPathParam } = body; - return deleteRule(ruleName, location, projectPathParam || initialPath); + const { location, projectPath: projectPathParam } = body as { location?: unknown; projectPath?: unknown }; + const resolvedLocation = typeof location === 'string' && location.trim().length > 0 ? 
location : 'project'; + const resolvedProjectPath = + typeof projectPathParam === 'string' && projectPathParam.trim().length > 0 ? projectPathParam : initialPath; + return deleteRule(ruleName, resolvedLocation, resolvedProjectPath); }); return true; } @@ -807,63 +852,89 @@ export async function handleRulesRoutes(ctx: RouteContext): Promise { location, subdirectory, projectPath: projectPathParam, - // CLI generation parameters generationType, description, templateType, extractScope, extractFocus - } = body; + } = body as { + mode?: unknown; + fileName?: unknown; + content?: unknown; + paths?: unknown; + location?: unknown; + subdirectory?: unknown; + projectPath?: unknown; + generationType?: unknown; + description?: unknown; + templateType?: unknown; + extractScope?: unknown; + extractFocus?: unknown; + }; - if (!fileName) { + const resolvedMode = typeof mode === 'string' ? mode : ''; + const resolvedFileName = typeof fileName === 'string' ? fileName : ''; + const resolvedContent = typeof content === 'string' ? content : ''; + const resolvedLocation = typeof location === 'string' && location.trim().length > 0 ? location : ''; + const resolvedSubdirectory = typeof subdirectory === 'string' ? subdirectory : ''; + const resolvedProjectPath = + typeof projectPathParam === 'string' && projectPathParam.trim().length > 0 ? projectPathParam : initialPath; + const resolvedGenerationType = typeof generationType === 'string' ? generationType : ''; + const resolvedDescription = typeof description === 'string' ? description : undefined; + const resolvedTemplateType = typeof templateType === 'string' ? templateType : undefined; + const resolvedExtractScope = typeof extractScope === 'string' ? extractScope : undefined; + const resolvedExtractFocus = typeof extractFocus === 'string' ? extractFocus : undefined; + const resolvedPaths = Array.isArray(paths) ? 
paths.filter((p): p is string => typeof p === 'string') : []; + + if (!resolvedFileName) { return { error: 'File name is required' }; } - if (!location) { + if (!resolvedLocation) { return { error: 'Location is required (project or user)' }; } - const projectPath = projectPathParam || initialPath; + const projectPath = resolvedProjectPath; // CLI generation mode - if (mode === 'cli-generate') { - if (!generationType) { + if (resolvedMode === 'cli-generate') { + if (!resolvedGenerationType) { return { error: 'generationType is required for CLI generation mode' }; } // Validate based on generation type - if (generationType === 'description' && !description) { + if (resolvedGenerationType === 'description' && !resolvedDescription) { return { error: 'description is required for description-based generation' }; } - if (generationType === 'template' && !templateType) { + if (resolvedGenerationType === 'template' && !resolvedTemplateType) { return { error: 'templateType is required for template-based generation' }; } return await generateRuleViaCLI({ - generationType, - description, - templateType, - extractScope, - extractFocus, - fileName, - location, - subdirectory: subdirectory || '', + generationType: resolvedGenerationType, + description: resolvedDescription, + templateType: resolvedTemplateType, + extractScope: resolvedExtractScope, + extractFocus: resolvedExtractFocus, + fileName: resolvedFileName, + location: resolvedLocation, + subdirectory: resolvedSubdirectory || '', projectPath }); } // Manual creation mode - if (!content) { + if (!resolvedContent) { return { error: 'Content is required for manual creation' }; } return await createRule({ - fileName, - content, - paths: paths || [], - location, - subdirectory: subdirectory || '', + fileName: resolvedFileName, + content: resolvedContent, + paths: resolvedPaths, + location: resolvedLocation, + subdirectory: resolvedSubdirectory || '', projectPath }); }); diff --git a/ccw/src/core/routes/session-routes.ts 
b/ccw/src/core/routes/session-routes.ts index dbf322f3..0a502f2d 100644 --- a/ccw/src/core/routes/session-routes.ts +++ b/ccw/src/core/routes/session-routes.ts @@ -1,21 +1,10 @@ -// @ts-nocheck /** * Session Routes Module * Handles all Session/Task-related API endpoints */ -import type { IncomingMessage, ServerResponse } from 'http'; import { readFileSync, writeFileSync, existsSync, readdirSync } from 'fs'; import { join } from 'path'; - -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; -} +import type { RouteContext } from './types.js'; /** * Get session detail data (context, summaries, impl-plan, review) @@ -23,8 +12,8 @@ export interface RouteContext { * @param {string} dataType - Type of data to load ('all', 'context', 'tasks', 'summary', 'plan', 'explorations', 'conflict', 'impl-plan', 'review') * @returns {Promise} */ -async function getSessionDetailData(sessionPath, dataType) { - const result = {}; +async function getSessionDetailData(sessionPath: string, dataType: string): Promise> { + const result: any = {}; // Normalize path const normalizedPath = sessionPath.replace(/\\/g, '/'); @@ -66,7 +55,7 @@ async function getSessionDetailData(sessionPath, dataType) { } } // Sort by task ID - result.tasks.sort((a, b) => a.task_id.localeCompare(b.task_id)); + result.tasks.sort((a: { task_id: string }, b: { task_id: string }) => a.task_id.localeCompare(b.task_id)); } } @@ -341,7 +330,7 @@ async function getSessionDetailData(sessionPath, dataType) { * @param {string} newStatus - New status (pending, in_progress, completed) * @returns {Promise} */ -async function updateTaskStatus(sessionPath, taskId, newStatus) { +async function updateTaskStatus(sessionPath: string, taskId: string, newStatus: string): Promise> { // Normalize path 
(handle both forward and back slashes) let normalizedPath = sessionPath.replace(/\\/g, '/'); @@ -429,9 +418,17 @@ export async function handleSessionRoutes(ctx: RouteContext): Promise { // API: Update task status if (pathname === '/api/update-task-status' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { sessionPath, taskId, newStatus } = body; + if (typeof body !== 'object' || body === null) { + return { error: 'Invalid request body', status: 400 }; + } - if (!sessionPath || !taskId || !newStatus) { + const { sessionPath, taskId, newStatus } = body as { + sessionPath?: unknown; + taskId?: unknown; + newStatus?: unknown; + }; + + if (typeof sessionPath !== 'string' || typeof taskId !== 'string' || typeof newStatus !== 'string') { return { error: 'sessionPath, taskId, and newStatus are required', status: 400 }; } @@ -443,19 +440,28 @@ export async function handleSessionRoutes(ctx: RouteContext): Promise { // API: Bulk update task status if (pathname === '/api/bulk-update-task-status' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { sessionPath, taskIds, newStatus } = body; + if (typeof body !== 'object' || body === null) { + return { error: 'Invalid request body', status: 400 }; + } - if (!sessionPath || !taskIds || !newStatus) { + const { sessionPath, taskIds, newStatus } = body as { + sessionPath?: unknown; + taskIds?: unknown; + newStatus?: unknown; + }; + + if (typeof sessionPath !== 'string' || !Array.isArray(taskIds) || typeof newStatus !== 'string') { return { error: 'sessionPath, taskIds, and newStatus are required', status: 400 }; } - const results = []; + const results: Array> = []; for (const taskId of taskIds) { + if (typeof taskId !== 'string') continue; try { const result = await updateTaskStatus(sessionPath, taskId, newStatus); results.push(result); } catch (err) { - results.push({ taskId, error: err.message }); + results.push({ taskId, error: err instanceof Error ? 
err.message : String(err) }); } } return { success: true, results }; diff --git a/ccw/src/core/routes/skills-routes.ts b/ccw/src/core/routes/skills-routes.ts index 42b7aeff..2b6ca1c4 100644 --- a/ccw/src/core/routes/skills-routes.ts +++ b/ccw/src/core/routes/skills-routes.ts @@ -1,22 +1,64 @@ -// @ts-nocheck /** * Skills Routes Module * Handles all Skills-related API endpoints */ -import type { IncomingMessage, ServerResponse } from 'http'; import { readFileSync, existsSync, readdirSync, statSync, unlinkSync, promises as fsPromises } from 'fs'; import { join } from 'path'; import { homedir } from 'os'; import { executeCliTool } from '../../tools/cli-executor.js'; +import { validatePath as validateAllowedPath } from '../../utils/path-validator.js'; +import type { RouteContext } from './types.js'; -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; +type SkillLocation = 'project' | 'user'; + +interface ParsedSkillFrontmatter { + name: string; + description: string; + version: string | null; + allowedTools: string[]; + content: string; +} + +interface SkillSummary { + name: string; + folderName: string; + description: string; + version: string | null; + allowedTools: string[]; + location: SkillLocation; + path: string; + supportingFiles: string[]; +} + +interface SkillsConfig { + projectSkills: SkillSummary[]; + userSkills: SkillSummary[]; +} + +interface SkillInfo { + name: string; + description: string; + version: string | null; + allowedTools: string[]; + supportingFiles: string[]; +} + +type SkillFolderValidation = + | { valid: true; errors: string[]; skillInfo: SkillInfo } + | { valid: false; errors: string[]; skillInfo: null }; + +type GenerationType = 'description' | 'template'; + +interface GenerationParams { + 
generationType: GenerationType; + description?: string; + skillName: string; + location: SkillLocation; + projectPath: string; +} + +function isRecord(value: unknown): value is Record { + return typeof value === 'object' && value !== null; } // ========== Skills Helper Functions ========== @@ -26,8 +68,8 @@ export interface RouteContext { * @param {string} content - Skill file content * @returns {Object} Parsed frontmatter and content */ -function parseSkillFrontmatter(content) { - const result = { +function parseSkillFrontmatter(content: string): ParsedSkillFrontmatter { + const result: ParsedSkillFrontmatter = { name: '', description: '', version: null, @@ -58,7 +100,11 @@ function parseSkillFrontmatter(content) { result.version = value.replace(/^["']|["']$/g, ''); } else if (key === 'allowed-tools' || key === 'allowedtools') { // Parse as comma-separated or YAML array - result.allowedTools = value.replace(/^\[|\]$/g, '').split(',').map(t => t.trim()).filter(Boolean); + result.allowedTools = value + .replace(/^\[|\]$/g, '') + .split(',') + .map((tool) => tool.trim()) + .filter(Boolean); } } } @@ -75,8 +121,8 @@ function parseSkillFrontmatter(content) { * @param {string} skillDir * @returns {string[]} */ -function getSupportingFiles(skillDir) { - const files = []; +function getSupportingFiles(skillDir: string): string[] { + const files: string[] = []; try { const entries = readdirSync(skillDir, { withFileTypes: true }); for (const entry of entries) { @@ -99,8 +145,8 @@ function getSupportingFiles(skillDir) { * @param {string} projectPath * @returns {Object} */ -function getSkillsConfig(projectPath) { - const result = { +function getSkillsConfig(projectPath: string): SkillsConfig { + const result: SkillsConfig = { projectSkills: [], userSkills: [] }; @@ -179,17 +225,44 @@ function getSkillsConfig(projectPath) { * @param {string} projectPath * @returns {Object} */ -function getSkillDetail(skillName, location, projectPath) { +async function getSkillDetail(skillName: 
string, location: SkillLocation, projectPath: string, initialPath: string) { try { - const baseDir = location === 'project' - ? join(projectPath, '.claude', 'skills') - : join(homedir(), '.claude', 'skills'); + if (skillName.includes('/') || skillName.includes('\\')) { + return { error: 'Access denied', status: 403 }; + } + if (skillName.includes('..')) { + return { error: 'Invalid skill name', status: 400 }; + } + + let baseDir; + if (location === 'project') { + try { + const validatedProjectPath = await validateAllowedPath(projectPath, { mustExist: true, allowedDirectories: [initialPath] }); + baseDir = join(validatedProjectPath, '.claude', 'skills'); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Skills] Project path validation failed: ${message}`); + return { error: status === 403 ? 'Access denied' : 'Invalid path', status }; + } + } else { + baseDir = join(homedir(), '.claude', 'skills'); + } const skillDir = join(baseDir, skillName); - const skillMdPath = join(skillDir, 'SKILL.md'); + const skillMdCandidate = join(skillDir, 'SKILL.md'); - if (!existsSync(skillMdPath)) { - return { error: 'Skill not found' }; + let skillMdPath; + try { + skillMdPath = await validateAllowedPath(skillMdCandidate, { mustExist: true, allowedDirectories: [skillDir] }); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + if (message.includes('File not found')) { + return { error: 'Skill not found', status: 404 }; + } + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Skills] Path validation failed: ${message}`); + return { error: status === 403 ? 
'Access denied' : 'Invalid path', status }; } const content = readFileSync(skillMdPath, 'utf8'); @@ -210,7 +283,7 @@ function getSkillDetail(skillName, location, projectPath) { } }; } catch (error) { - return { error: (error as Error).message }; + return { error: (error as Error).message, status: 500 }; } } @@ -221,38 +294,50 @@ function getSkillDetail(skillName, location, projectPath) { * @param {string} projectPath * @returns {Object} */ -function deleteSkill(skillName, location, projectPath) { +async function deleteSkill(skillName: string, location: SkillLocation, projectPath: string, initialPath: string) { try { - const baseDir = location === 'project' - ? join(projectPath, '.claude', 'skills') - : join(homedir(), '.claude', 'skills'); - - const skillDir = join(baseDir, skillName); - - if (!existsSync(skillDir)) { - return { error: 'Skill not found' }; + if (skillName.includes('/') || skillName.includes('\\')) { + return { error: 'Access denied', status: 403 }; + } + if (skillName.includes('..')) { + return { error: 'Invalid skill name', status: 400 }; } - // Recursively delete directory - const deleteRecursive = (dirPath) => { - if (existsSync(dirPath)) { - readdirSync(dirPath).forEach((file) => { - const curPath = join(dirPath, file); - if (statSync(curPath).isDirectory()) { - deleteRecursive(curPath); - } else { - unlinkSync(curPath); - } - }); - fsPromises.rmdir(dirPath); + let baseDir; + if (location === 'project') { + try { + const validatedProjectPath = await validateAllowedPath(projectPath, { mustExist: true, allowedDirectories: [initialPath] }); + baseDir = join(validatedProjectPath, '.claude', 'skills'); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Skills] Project path validation failed: ${message}`); + return { error: status === 403 ? 
'Access denied' : 'Invalid path', status }; } - }; + } else { + baseDir = join(homedir(), '.claude', 'skills'); + } - deleteRecursive(skillDir); + const skillDirCandidate = join(baseDir, skillName); + + let skillDir; + try { + skillDir = await validateAllowedPath(skillDirCandidate, { mustExist: true, allowedDirectories: [baseDir] }); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + if (message.includes('File not found')) { + return { error: 'Skill not found', status: 404 }; + } + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Skills] Path validation failed: ${message}`); + return { error: status === 403 ? 'Access denied' : 'Invalid path', status }; + } + + await fsPromises.rm(skillDir, { recursive: true, force: true }); return { success: true, skillName, location }; } catch (error) { - return { error: (error as Error).message }; + return { error: (error as Error).message, status: 500 }; } } @@ -261,8 +346,8 @@ function deleteSkill(skillName, location, projectPath) { * @param {string} folderPath - Path to skill folder * @returns {Object} Validation result with skill info */ -function validateSkillFolder(folderPath) { - const errors = []; +function validateSkillFolder(folderPath: string): SkillFolderValidation { + const errors: string[] = []; // Check if folder exists if (!existsSync(folderPath)) { @@ -327,7 +412,7 @@ function validateSkillFolder(folderPath) { * @param {string} source - Source directory path * @param {string} target - Target directory path */ -async function copyDirectoryRecursive(source, target) { +async function copyDirectoryRecursive(source: string, target: string): Promise { await fsPromises.mkdir(target, { recursive: true }); const entries = await fsPromises.readdir(source, { withFileTypes: true }); @@ -352,7 +437,7 @@ async function copyDirectoryRecursive(source, target) { * @param {string} customName - Optional custom name for skill * @returns {Object} */ -async function 
importSkill(sourcePath, location, projectPath, customName) { +async function importSkill(sourcePath: string, location: SkillLocation, projectPath: string, customName?: string) { try { // Validate source folder const validation = validateSkillFolder(sourcePath); @@ -371,6 +456,9 @@ async function importSkill(sourcePath, location, projectPath, customName) { // Determine target folder name const skillName = customName || validation.skillInfo.name; + if (skillName.includes('/') || skillName.includes('\\') || skillName.includes('..')) { + return { error: 'Invalid skill name', status: 400 }; + } const targetPath = join(baseDir, skillName); // Check if already exists @@ -402,7 +490,7 @@ async function importSkill(sourcePath, location, projectPath, customName) { * @param {string} params.projectPath - Project root path * @returns {Object} */ -async function generateSkillViaCLI({ generationType, description, skillName, location, projectPath }) { +async function generateSkillViaCLI({ generationType, description, skillName, location, projectPath }: GenerationParams) { try { // Validate inputs if (!skillName) { @@ -523,9 +611,19 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise { // API: Get all skills (project and user) if (pathname === '/api/skills') { const projectPathParam = url.searchParams.get('path') || initialPath; - const skillsData = getSkillsConfig(projectPathParam); - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(skillsData)); + + try { + const validatedProjectPath = await validateAllowedPath(projectPathParam, { mustExist: true, allowedDirectories: [initialPath] }); + const skillsData = getSkillsConfig(validatedProjectPath); + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(skillsData)); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 
403 : 400; + console.error(`[Skills] Project path validation failed: ${message}`); + res.writeHead(status, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: status === 403 ? 'Access denied' : 'Invalid path', projectSkills: [], userSkills: [] })); + } return true; } @@ -537,18 +635,46 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise { const location = url.searchParams.get('location') || 'project'; const projectPathParam = url.searchParams.get('path') || initialPath; - const baseDir = location === 'project' - ? join(projectPathParam, '.claude', 'skills') - : join(homedir(), '.claude', 'skills'); + if (skillName.includes('/') || skillName.includes('\\') || skillName.includes('..')) { + res.writeHead(400, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Invalid skill name' })); + return true; + } - const dirPath = subPath - ? join(baseDir, skillName, subPath) - : join(baseDir, skillName); + let baseDir: string; + if (location === 'project') { + try { + const validatedProjectPath = await validateAllowedPath(projectPathParam, { mustExist: true, allowedDirectories: [initialPath] }); + baseDir = join(validatedProjectPath, '.claude', 'skills'); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Skills] Project path validation failed: ${message}`); + res.writeHead(status, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: status === 403 ? 'Access denied' : 'Invalid path' })); + return true; + } + } else { + baseDir = join(homedir(), '.claude', 'skills'); + } - // Security check: ensure path is within skill folder - if (!dirPath.startsWith(join(baseDir, skillName))) { - res.writeHead(403, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ error: 'Access denied' })); + const skillRoot = join(baseDir, skillName); + const requestedDir = subPath ? 
join(skillRoot, subPath) : skillRoot; + + let dirPath: string; + try { + dirPath = await validateAllowedPath(requestedDir, { mustExist: true, allowedDirectories: [skillRoot] }); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + if (message.includes('File not found')) { + res.writeHead(404, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Directory not found' })); + return true; + } + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Skills] Path validation failed: ${message}`); + res.writeHead(status, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: status === 403 ? 'Access denied' : 'Invalid path' })); return true; } @@ -596,16 +722,46 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise { return true; } - const baseDir = location === 'project' - ? join(projectPathParam, '.claude', 'skills') - : join(homedir(), '.claude', 'skills'); + if (skillName.includes('/') || skillName.includes('\\') || skillName.includes('..')) { + res.writeHead(400, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Invalid skill name' })); + return true; + } - const filePath = join(baseDir, skillName, fileName); + let baseDir: string; + if (location === 'project') { + try { + const validatedProjectPath = await validateAllowedPath(projectPathParam, { mustExist: true, allowedDirectories: [initialPath] }); + baseDir = join(validatedProjectPath, '.claude', 'skills'); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Skills] Project path validation failed: ${message}`); + res.writeHead(status, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: status === 403 ? 
'Access denied' : 'Invalid path' })); + return true; + } + } else { + baseDir = join(homedir(), '.claude', 'skills'); + } - // Security check: ensure file is within skill folder - if (!filePath.startsWith(join(baseDir, skillName))) { - res.writeHead(403, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ error: 'Access denied' })); + const skillRoot = join(baseDir, skillName); + const requestedFile = join(skillRoot, fileName); + + let filePath: string; + try { + filePath = await validateAllowedPath(requestedFile, { mustExist: true, allowedDirectories: [skillRoot] }); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + if (message.includes('File not found')) { + res.writeHead(404, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'File not found' })); + return true; + } + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Skills] Path validation failed: ${message}`); + res.writeHead(status, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: status === 403 ? 'Access denied' : 'Invalid path' })); return true; } @@ -632,25 +788,54 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise { const skillName = decodeURIComponent(pathParts[3]); handlePostRequest(req, res, async (body) => { - const { fileName, content, location, projectPath: projectPathParam } = body; + if (!isRecord(body)) { + return { error: 'Invalid request body', status: 400 }; + } - if (!fileName) { + const fileName = body.fileName; + const content = body.content; + const location: SkillLocation = body.location === 'project' ? 'project' : 'user'; + const projectPathParam = typeof body.projectPath === 'string' ? 
body.projectPath : undefined; + + if (typeof fileName !== 'string' || !fileName) { return { error: 'fileName is required' }; } - if (content === undefined) { + if (typeof content !== 'string') { return { error: 'content is required' }; } - const baseDir = location === 'project' - ? join(projectPathParam || initialPath, '.claude', 'skills') - : join(homedir(), '.claude', 'skills'); + if (skillName.includes('/') || skillName.includes('\\') || skillName.includes('..')) { + return { error: 'Invalid skill name', status: 400 }; + } - const filePath = join(baseDir, skillName, fileName); + let baseDir: string; + if (location === 'project') { + try { + const projectRoot = projectPathParam || initialPath; + const validatedProjectPath = await validateAllowedPath(projectRoot, { mustExist: true, allowedDirectories: [initialPath] }); + baseDir = join(validatedProjectPath, '.claude', 'skills'); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Skills] Project path validation failed: ${message}`); + return { error: status === 403 ? 'Access denied' : 'Invalid path', status }; + } + } else { + baseDir = join(homedir(), '.claude', 'skills'); + } - // Security check: ensure file is within skill folder - if (!filePath.startsWith(join(baseDir, skillName))) { - return { error: 'Access denied' }; + const skillRoot = join(baseDir, skillName); + const requestedFile = join(skillRoot, fileName); + + let filePath: string; + try { + filePath = await validateAllowedPath(requestedFile, { allowedDirectories: [skillRoot] }); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Skills] Path validation failed: ${message}`); + return { error: status === 403 ? 
'Access denied' : 'Invalid path', status }; } try { @@ -667,25 +852,43 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise { if (pathname.startsWith('/api/skills/') && req.method === 'GET' && !pathname.endsWith('/skills/') && !pathname.endsWith('/dir') && !pathname.endsWith('/file')) { const skillName = decodeURIComponent(pathname.replace('/api/skills/', '')); - const location = url.searchParams.get('location') || 'project'; + const locationParam = url.searchParams.get('location'); + const location: SkillLocation = locationParam === 'user' ? 'user' : 'project'; const projectPathParam = url.searchParams.get('path') || initialPath; - const skillDetail = getSkillDetail(skillName, location, projectPathParam); + const skillDetail = await getSkillDetail(skillName, location, projectPathParam, initialPath); if (skillDetail.error) { - res.writeHead(404, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(skillDetail)); - } else { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(skillDetail)); + res.writeHead(skillDetail.status || 404, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: skillDetail.error })); + return true; } + + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(skillDetail)); return true; } // API: Delete skill if (pathname.startsWith('/api/skills/') && req.method === 'DELETE') { const skillName = decodeURIComponent(pathname.replace('/api/skills/', '')); + if (skillName.includes('/') || skillName.includes('\\')) { + res.writeHead(403, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Access denied' })); + return true; + } + if (skillName.includes('..')) { + res.writeHead(400, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Invalid skill name' })); + return true; + } handlePostRequest(req, res, async (body) => { - const { location, projectPath: projectPathParam } = body; - 
return deleteSkill(skillName, location, projectPathParam || initialPath); + if (!isRecord(body)) { + return { error: 'Invalid request body', status: 400 }; + } + + const location: SkillLocation = body.location === 'project' ? 'project' : 'user'; + const projectPathParam = typeof body.projectPath === 'string' ? body.projectPath : undefined; + + return deleteSkill(skillName, location, projectPathParam || initialPath, initialPath); }); return true; } @@ -693,11 +896,24 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise { // API: Validate skill import if (pathname === '/api/skills/validate-import' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { sourcePath } = body; - if (!sourcePath) { + if (!isRecord(body)) { return { valid: false, errors: ['Source path is required'], skillInfo: null }; } - return validateSkillFolder(sourcePath); + + const sourcePath = body.sourcePath; + if (typeof sourcePath !== 'string' || !sourcePath.trim()) { + return { valid: false, errors: ['Source path is required'], skillInfo: null }; + } + + try { + const validatedSourcePath = await validateAllowedPath(sourcePath, { mustExist: true }); + return validateSkillFolder(validatedSourcePath); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Skills] Path validation failed: ${message}`); + return { error: status === 403 ? 
'Access denied' : 'Invalid path', status }; + } }); return true; } @@ -705,37 +921,77 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise { // API: Create/Import skill if (pathname === '/api/skills/create' && req.method === 'POST') { handlePostRequest(req, res, async (body) => { - const { mode, location, sourcePath, skillName, description, generationType, projectPath: projectPathParam } = body; + if (!isRecord(body)) { + return { error: 'Invalid request body', status: 400 }; + } - if (!mode) { + const mode = body.mode; + const locationValue = body.location; + const sourcePath = typeof body.sourcePath === 'string' ? body.sourcePath : undefined; + const skillName = typeof body.skillName === 'string' ? body.skillName : undefined; + const description = typeof body.description === 'string' ? body.description : undefined; + const generationType = typeof body.generationType === 'string' ? body.generationType : undefined; + const projectPathParam = typeof body.projectPath === 'string' ? body.projectPath : undefined; + + if (typeof mode !== 'string' || !mode) { return { error: 'Mode is required (import or cli-generate)' }; } - if (!location) { + if (locationValue !== 'project' && locationValue !== 'user') { return { error: 'Location is required (project or user)' }; } + const location: SkillLocation = locationValue; const projectPath = projectPathParam || initialPath; + let validatedProjectPath = projectPath; + if (location === 'project') { + try { + validatedProjectPath = await validateAllowedPath(projectPath, { mustExist: true, allowedDirectories: [initialPath] }); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Skills] Project path validation failed: ${message}`); + return { error: status === 403 ? 
'Access denied' : 'Invalid path', status }; + } + } + if (mode === 'import') { // Import mode: copy existing skill folder if (!sourcePath) { return { error: 'Source path is required for import mode' }; } - return await importSkill(sourcePath, location, projectPath, skillName); + if (skillName && (skillName.includes('/') || skillName.includes('\\') || skillName.includes('..'))) { + return { error: 'Invalid skill name', status: 400 }; + } + + let validatedSourcePath; + try { + validatedSourcePath = await validateAllowedPath(sourcePath, { mustExist: true }); + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + const status = message.includes('Access denied') ? 403 : 400; + console.error(`[Skills] Path validation failed: ${message}`); + return { error: status === 403 ? 'Access denied' : 'Invalid path', status }; + } + + return await importSkill(validatedSourcePath, location, validatedProjectPath, skillName); } else if (mode === 'cli-generate') { // CLI generate mode: use Claude to generate skill if (!skillName) { return { error: 'Skill name is required for CLI generation mode' }; } + if (skillName.includes('/') || skillName.includes('\\') || skillName.includes('..')) { + return { error: 'Invalid skill name', status: 400 }; + } return await generateSkillViaCLI({ - generationType: generationType || 'description', + generationType: generationType === 'template' ? 'template' : 'description', description, skillName, location, - projectPath + projectPath: validatedProjectPath }); } else { return { error: 'Invalid mode. 
Must be "import" or "cli-generate"' }; diff --git a/ccw/src/core/routes/status-routes.ts b/ccw/src/core/routes/status-routes.ts index 83b0fc02..93c39100 100644 --- a/ccw/src/core/routes/status-routes.ts +++ b/ccw/src/core/routes/status-routes.ts @@ -1,14 +1,13 @@ -// @ts-nocheck /** * Status Routes Module * Aggregated status endpoint for faster dashboard loading */ -import type { IncomingMessage, ServerResponse } from 'http'; import { existsSync } from 'fs'; import { join } from 'path'; import { homedir } from 'os'; import { getCliToolsStatus } from '../../tools/cli-executor.js'; import { checkVenvStatus, checkSemanticStatus } from '../../tools/codex-lens.js'; +import type { RouteContext } from './types.js'; /** * Check CCW installation status @@ -54,16 +53,6 @@ function checkCcwInstallStatus(): { }; } -export interface RouteContext { - pathname: string; - url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; -} - /** * Handle status routes * @returns true if route was handled, false otherwise diff --git a/ccw/src/core/routes/system-routes.ts b/ccw/src/core/routes/system-routes.ts index dd0a63a7..7494bed6 100644 --- a/ccw/src/core/routes/system-routes.ts +++ b/ccw/src/core/routes/system-routes.ts @@ -1,9 +1,7 @@ -// @ts-nocheck /** * System Routes Module * Handles all system-related API endpoints */ -import type { IncomingMessage, ServerResponse } from 'http'; import type { Server } from 'http'; import { readFileSync, existsSync, promises as fsPromises } from 'fs'; import { join } from 'path'; @@ -17,17 +15,11 @@ import { cleanAllStorage, resolveProjectId, projectExists, - formatBytes -} from '../../tools/storage-manager.js'; + formatBytes + } from '../../tools/storage-manager.js'; +import type { RouteContext } from './types.js'; -export interface RouteContext { - pathname: string; - 
url: URL; - req: IncomingMessage; - res: ServerResponse; - initialPath: string; - handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise) => void; - broadcastToClients: (data: unknown) => void; +interface SystemRouteContext extends RouteContext { server: Server; } @@ -39,7 +31,7 @@ export interface RouteContext { const NPM_PACKAGE_NAME = 'claude-code-workflow'; // Cache for version check (avoid too frequent requests) -let versionCheckCache = null; +let versionCheckCache: Record | null = null; let versionCheckTime = 0; const VERSION_CHECK_CACHE_TTL = 3600000; // 1 hour @@ -83,7 +75,7 @@ function compareVersions(v1: string, v2: string): number { * Check npm registry for latest version * @returns {Promise} */ -async function checkNpmVersion(): Promise { +async function checkNpmVersion(): Promise> { // Return cached result if still valid const now = Date.now(); if (versionCheckCache && (now - versionCheckTime) < VERSION_CHECK_CACHE_TTL) { @@ -103,8 +95,8 @@ async function checkNpmVersion(): Promise { throw new Error('HTTP ' + response.status); } - const data = await response.json(); - const latestVersion = data.version; + const data = await response.json() as { version?: unknown }; + const latestVersion = typeof data.version === 'string' ? 
data.version : currentVersion; // Compare versions const hasUpdate = compareVersions(latestVersion, currentVersion) > 0; @@ -174,10 +166,11 @@ async function getWorkflowData(projectPath: string): Promise { const sessions = await scanSessions(workflowDir); const data = await aggregateData(sessions, workflowDir); - data.projectPath = normalizePathForDisplay(resolvedPath); - data.recentPaths = getRecentPaths(); - - return data; + return { + ...data, + projectPath: normalizePathForDisplay(resolvedPath), + recentPaths: getRecentPaths() + }; } // ======================================== @@ -188,7 +181,7 @@ async function getWorkflowData(projectPath: string): Promise { * Handle System routes * @returns true if route was handled, false otherwise */ -export async function handleSystemRoutes(ctx: RouteContext): Promise { +export async function handleSystemRoutes(ctx: SystemRouteContext): Promise { const { pathname, url, req, res, initialPath, handlePostRequest, broadcastToClients, server } = ctx; // API: Get workflow data for a path diff --git a/ccw/src/core/routes/types.ts b/ccw/src/core/routes/types.ts new file mode 100644 index 00000000..6e746022 --- /dev/null +++ b/ccw/src/core/routes/types.ts @@ -0,0 +1,25 @@ +import type { IncomingMessage, ServerResponse } from 'http'; + +export type PostRequestHandler = (body: unknown) => Promise; + +export interface RouteContext { + /** URL pathname (e.g. `/api/status`). */ + pathname: string; + /** Parsed request URL. */ + url: URL; + /** Incoming HTTP request. */ + req: IncomingMessage; + /** HTTP response to write to. */ + res: ServerResponse; + /** Initial path configured for the server (used for dashboard routes). */ + initialPath: string; + /** Helper that parses JSON body and passes it to `handler`. */ + handlePostRequest: ( + req: IncomingMessage, + res: ServerResponse, + handler: PostRequestHandler + ) => void; + /** Broadcast payload to connected dashboard clients. 
*/ + broadcastToClients: (data: unknown) => void; +} + diff --git a/ccw/src/core/server.ts b/ccw/src/core/server.ts index c396cca1..cecfc4b8 100644 --- a/ccw/src/core/server.ts +++ b/ccw/src/core/server.ts @@ -1,4 +1,3 @@ -// @ts-nocheck import http from 'http'; import { URL } from 'url'; import { readFileSync, existsSync } from 'fs'; @@ -27,11 +26,20 @@ import { handleHelpRoutes } from './routes/help-routes.js'; import { handleLiteLLMRoutes } from './routes/litellm-routes.js'; import { handleLiteLLMApiRoutes } from './routes/litellm-api-routes.js'; import { handleNavStatusRoutes } from './routes/nav-status-routes.js'; +import { handleAuthRoutes } from './routes/auth-routes.js'; // Import WebSocket handling -import { handleWebSocketUpgrade, broadcastToClients } from './websocket.js'; +import { handleWebSocketUpgrade, broadcastToClients, extractSessionIdFromPath } from './websocket.js'; + +import { getTokenManager } from './auth/token-manager.js'; +import { authMiddleware, isLocalhostRequest, setAuthCookie } from './auth/middleware.js'; +import { getCorsOrigin } from './cors.js'; +import { csrfValidation } from './auth/csrf-middleware.js'; +import { getCsrfTokenManager } from './auth/csrf-manager.js'; +import { randomBytes } from 'crypto'; import type { ServerConfig } from '../types/config.js'; +import type { PostRequestHandler } from './routes/types.js'; interface ServerOptions { port?: number; @@ -40,13 +48,7 @@ interface ServerOptions { open?: boolean; } -interface PostResult { - error?: string; - status?: number; - [key: string]: unknown; -} - -type PostHandler = (body: unknown) => Promise; +type PostHandler = PostRequestHandler; // Template paths const TEMPLATE_PATH = join(import.meta.dirname, '../../src/templates/dashboard.html'); @@ -158,28 +160,131 @@ const MODULE_FILES = [ * Handle POST request with JSON body */ function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: PostHandler): void { - let body = ''; - req.on('data', 
chunk => { body += chunk; }); - req.on('end', async () => { + const cachedParsed = (req as any).body; + const cachedRawBody = (req as any).__ccwRawBody; + + const handleBody = async (parsed: unknown) => { try { - const parsed = JSON.parse(body); const result = await handler(parsed); - if (result.error) { - const status = result.status || 500; + const isObjectResult = typeof result === 'object' && result !== null; + const errorValue = isObjectResult && 'error' in result ? (result as { error?: unknown }).error : undefined; + const statusValue = isObjectResult && 'status' in result ? (result as { status?: unknown }).status : undefined; + + if (typeof errorValue === 'string' && errorValue.length > 0) { + const status = typeof statusValue === 'number' ? statusValue : 500; res.writeHead(status, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ error: result.error })); - } else { - res.writeHead(200, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify(result)); + res.end(JSON.stringify({ error: errorValue })); + return; } + + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(result)); } catch (error: unknown) { + const message = error instanceof Error ? error.message : String(error); res.writeHead(500, { 'Content-Type': 'application/json' }); - res.end(JSON.stringify({ error: (error as Error).message })); + res.end(JSON.stringify({ error: message })); + } + }; + + if (cachedParsed !== undefined) { + void handleBody(cachedParsed); + return; + } + + if (typeof cachedRawBody === 'string') { + try { + void handleBody(JSON.parse(cachedRawBody)); + } catch (error: unknown) { + const message = error instanceof Error ? 
error.message : String(error); + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: message })); + } + return; + } + + let body = ''; + req.on('data', (chunk: Buffer) => { body += chunk.toString(); }); + req.on('end', async () => { + try { + (req as any).__ccwRawBody = body; + const parsed = JSON.parse(body); + (req as any).body = parsed; + await handleBody(parsed); + } catch (error: unknown) { + const message = error instanceof Error ? error.message : String(error); + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: message })); } }); } +function getHeaderValue(header: string | string[] | undefined): string | null { + if (!header) return null; + if (Array.isArray(header)) return header[0] ?? null; + return header; +} + +function parseCookieHeader(cookieHeader: string | null | undefined): Record { + if (!cookieHeader) return {}; + + const cookies: Record = {}; + for (const part of cookieHeader.split(';')) { + const [rawName, ...rawValueParts] = part.trim().split('='); + if (!rawName) continue; + const rawValue = rawValueParts.join('='); + try { + cookies[rawName] = decodeURIComponent(rawValue); + } catch { + cookies[rawName] = rawValue; + } + } + return cookies; +} + +function appendSetCookie(res: http.ServerResponse, cookie: string): void { + const existing = res.getHeader('Set-Cookie'); + if (!existing) { + res.setHeader('Set-Cookie', cookie); + return; + } + + if (Array.isArray(existing)) { + res.setHeader('Set-Cookie', [...existing, cookie]); + return; + } + + res.setHeader('Set-Cookie', [String(existing), cookie]); +} + +function getOrCreateSessionId(req: http.IncomingMessage, res: http.ServerResponse): string { + const cookies = parseCookieHeader(getHeaderValue(req.headers.cookie)); + const existing = cookies.ccw_session_id; + if (existing) return existing; + + const created = randomBytes(16).toString('hex'); + const attributes = [ + 
`ccw_session_id=${encodeURIComponent(created)}`, + 'Path=/', + 'HttpOnly', + 'SameSite=Strict', + `Max-Age=${24 * 60 * 60}`, + ]; + appendSetCookie(res, attributes.join('; ')); + return created; +} + +function setCsrfCookie(res: http.ServerResponse, token: string, maxAgeSeconds: number): void { + const attributes = [ + `XSRF-TOKEN=${encodeURIComponent(token)}`, + 'Path=/', + 'HttpOnly', + 'SameSite=Strict', + `Max-Age=${maxAgeSeconds}`, + ]; + appendSetCookie(res, attributes.join('; ')); +} + /** * Generate dashboard HTML with embedded CSS and JS */ @@ -244,17 +349,27 @@ window.INITIAL_PATH = '${normalizePathForDisplay(initialPath).replace(/\\/g, '/' * @returns {Promise} */ export async function startServer(options: ServerOptions = {}): Promise { - const port = options.port ?? 3456; + let serverPort = options.port ?? 3456; const initialPath = options.initialPath || process.cwd(); + const host = options.host ?? '127.0.0.1'; + + const tokenManager = getTokenManager(); + const secretKey = tokenManager.getSecretKey(); + tokenManager.getOrCreateAuthToken(); + const unauthenticatedPaths = new Set(['/api/auth/token', '/api/csrf-token']); const server = http.createServer(async (req, res) => { - const url = new URL(req.url, `http://localhost:${port}`); + const url = new URL(req.url ?? '/', `http://localhost:${serverPort}`); const pathname = url.pathname; // CORS headers for API requests - res.setHeader('Access-Control-Allow-Origin', '*'); + const originHeader = Array.isArray(req.headers.origin) ? 
req.headers.origin[0] : req.headers.origin; + res.setHeader('Access-Control-Allow-Origin', getCorsOrigin(originHeader, serverPort)); res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, PATCH, DELETE, OPTIONS'); - res.setHeader('Access-Control-Allow-Headers', 'Content-Type'); + res.setHeader('Access-Control-Allow-Headers', 'Content-Type, Authorization, X-CSRF-Token'); + res.setHeader('Access-Control-Allow-Credentials', 'true'); + res.setHeader('Access-Control-Expose-Headers', 'X-CSRF-Token'); + res.setHeader('Vary', 'Origin'); if (req.method === 'OPTIONS') { res.writeHead(200); @@ -277,12 +392,43 @@ export async function startServer(options: ServerOptions = {}): Promise = { 'js': 'application/javascript', 'css': 'text/css', 'json': 'application/json', @@ -431,7 +586,7 @@ export async function startServer(options: ServerOptions = {}): Promise { - server.listen(port, () => { - console.log(`Dashboard server running at http://localhost:${port}`); - console.log(`WebSocket endpoint available at ws://localhost:${port}/ws`); - console.log(`Hook endpoint available at POST http://localhost:${port}/api/hook`); + server.listen(serverPort, host, () => { + const addr = server.address(); + if (addr && typeof addr === 'object') { + serverPort = addr.port; + } + + console.log(`Dashboard server running at http://${host}:${serverPort}`); + console.log(`WebSocket endpoint available at ws://${host}:${serverPort}/ws`); + console.log(`Hook endpoint available at POST http://${host}:${serverPort}/api/hook`); resolve(server); }); server.on('error', reject); diff --git a/ccw/src/core/websocket.ts b/ccw/src/core/websocket.ts index efeb51af..c79c5458 100644 --- a/ccw/src/core/websocket.ts +++ b/ccw/src/core/websocket.ts @@ -1,11 +1,17 @@ -// @ts-nocheck import { createHash } from 'crypto'; +import type { IncomingMessage } from 'http'; +import type { Duplex } from 'stream'; // WebSocket clients for real-time notifications -export const wsClients = new Set(); +export const wsClients = 
new Set(); -export function handleWebSocketUpgrade(req, socket, head) { - const key = req.headers['sec-websocket-key']; +export function handleWebSocketUpgrade(req: IncomingMessage, socket: Duplex, _head: Buffer): void { + const header = req.headers['sec-websocket-key']; + const key = Array.isArray(header) ? header[0] : header; + if (!key) { + socket.end(); + return; + } const acceptKey = createHash('sha1') .update(key + '258EAFA5-E914-47DA-95CA-C5AB0DC85B11') .digest('base64'); @@ -26,7 +32,7 @@ export function handleWebSocketUpgrade(req, socket, head) { console.log(`[WS] Client connected (${wsClients.size} total)`); // Handle incoming messages - socket.on('data', (buffer) => { + socket.on('data', (buffer: Buffer) => { try { const frame = parseWebSocketFrame(buffer); if (!frame) return; @@ -74,7 +80,7 @@ export function handleWebSocketUpgrade(req, socket, head) { * Parse WebSocket frame (simplified) * Returns { opcode, payload } or null */ -export function parseWebSocketFrame(buffer) { +export function parseWebSocketFrame(buffer: Buffer): { opcode: number; payload: string } | null { if (buffer.length < 2) return null; const firstByte = buffer[0]; @@ -97,7 +103,7 @@ export function parseWebSocketFrame(buffer) { offset = 10; } - let mask = null; + let mask: Buffer | null = null; if (isMasked) { mask = buffer.slice(offset, offset + 4); offset += 4; @@ -117,7 +123,7 @@ export function parseWebSocketFrame(buffer) { /** * Create WebSocket frame */ -export function createWebSocketFrame(data) { +export function createWebSocketFrame(data: unknown): Buffer { const payload = Buffer.from(JSON.stringify(data), 'utf8'); const length = payload.length; @@ -147,7 +153,7 @@ export function createWebSocketFrame(data) { /** * Broadcast message to all connected WebSocket clients */ -export function broadcastToClients(data) { +export function broadcastToClients(data: unknown): void { const frame = createWebSocketFrame(data); for (const client of wsClients) { @@ -158,13 +164,15 @@ 
export function broadcastToClients(data) { } } - console.log(`[WS] Broadcast to ${wsClients.size} clients:`, data.type); + const eventType = + typeof data === 'object' && data !== null && 'type' in data ? (data as { type?: unknown }).type : undefined; + console.log(`[WS] Broadcast to ${wsClients.size} clients:`, eventType); } /** * Extract session ID from file path */ -export function extractSessionIdFromPath(filePath) { +export function extractSessionIdFromPath(filePath: string): string | null { // Normalize path const normalized = filePath.replace(/\\/g, '/'); diff --git a/ccw/src/tools/README.md b/ccw/src/tools/README.md new file mode 100644 index 00000000..e1e2df89 --- /dev/null +++ b/ccw/src/tools/README.md @@ -0,0 +1,29 @@ +# Tools + +This directory contains CCW “tools”: self-contained modules that implement concrete functionality (executors, integrations, etc.) that higher-level CLI and route layers call into. + +## CLI Executor + +The CLI executor is split into focused modules to keep responsibilities clear and keep the public API stable via re-exports. + +**Entry point** +- `ccw/src/tools/cli-executor.ts` – thin facade that re-exports from `cli-executor-core.ts` (stable import path for callers). + +**Modules** +- `ccw/src/tools/cli-executor-core.ts` – orchestrates tool execution, resume/merge logic, and conversation persistence wiring. +- `ccw/src/tools/cli-executor-utils.ts` – debug logging, tool availability checks (with cache), command building. +- `ccw/src/tools/cli-executor-state.ts` – conversation/history types + SQLite-backed storage helpers. +- `ccw/src/tools/cli-prompt-builder.ts` – prompt concatenation helpers (plain/YAML/JSON) and merged-conversation prompt formatting. + +**Dependency flow (high level)** +``` +cli-executor.ts + -> cli-executor-core.ts + -> cli-executor-utils.ts + -> cli-executor-state.ts + -> cli-prompt-builder.ts +``` + +**Public API** +- Prefer importing from `ccw/src/tools/cli-executor.ts`. 
+- `cli-executor-core.ts` re-exports prompt helpers/types from `cli-prompt-builder.ts` to preserve existing imports (`PromptConcatenator`, `buildPrompt`, `PromptFormat`, etc.). diff --git a/ccw/src/tools/cli-executor-core.ts b/ccw/src/tools/cli-executor-core.ts new file mode 100644 index 00000000..23e694c3 --- /dev/null +++ b/ccw/src/tools/cli-executor-core.ts @@ -0,0 +1,1074 @@ +/** + * CLI Executor Tool - Unified execution for external CLI tools + * Supports Gemini, Qwen, and Codex with streaming output + */ + +import { z } from 'zod'; +import type { ToolSchema, ToolResult } from '../types/tool.js'; +import { spawn, ChildProcess } from 'child_process'; +import { validatePath } from '../utils/path-resolver.js'; +import { escapeWindowsArg } from '../utils/shell-escape.js'; +import { buildCommand, checkToolAvailability, clearToolCache, debugLog, errorLog, type NativeResumeConfig, type ToolAvailability } from './cli-executor-utils.js'; +import type { ConversationRecord, ConversationTurn, ExecutionOutput, ExecutionRecord } from './cli-executor-state.js'; +import { + buildMergedPrompt, + buildMultiTurnPrompt, + mergeConversations, + type MergeResult +} from './cli-prompt-builder.js'; +import { + convertToConversation, + ensureHistoryDir, + getExecutionDetail, + getExecutionHistory, + getSqliteStore, + loadConversation, + saveConversation +} from './cli-executor-state.js'; + +// Track current running child process for cleanup on interruption +let currentChildProcess: ChildProcess | null = null; +let killTimeout: NodeJS.Timeout | null = null; +let killTimeoutProcess: ChildProcess | null = null; + +/** + * Kill the current running CLI child process + * Called when parent process receives SIGINT/SIGTERM + */ +export function killCurrentCliProcess(): boolean { + const child = currentChildProcess; + if (!child || child.killed) return false; + + debugLog('KILL', 'Killing current child process', { pid: child.pid }); + + try { + child.kill('SIGTERM'); + } catch { + // Ignore 
kill errors (process may already be gone) + } + + if (killTimeout) { + clearTimeout(killTimeout); + killTimeout = null; + killTimeoutProcess = null; + } + + // Force kill after 2 seconds if still running. + killTimeoutProcess = child; + killTimeout = setTimeout(() => { + const target = killTimeoutProcess; + if (!target || target !== currentChildProcess) return; + if (target.killed) return; + + try { + target.kill('SIGKILL'); + } catch { + // Ignore kill errors (process may already be gone) + } + }, 2000); + + return true; +} + +// LiteLLM integration +import { executeLiteLLMEndpoint } from './litellm-executor.js'; +import { findEndpointById } from '../config/litellm-api-config-manager.js'; + +// Native resume support +import { + trackNewSession, + getNativeResumeArgs, + supportsNativeResume, + calculateProjectHash +} from './native-session-discovery.js'; +import { + determineResumeStrategy, + buildContextPrefix, + getResumeModeDescription, + type ResumeDecision +} from './resume-strategy.js'; +import { + isToolEnabled as isToolEnabledFromConfig, + enableTool as enableToolFromConfig, + disableTool as disableToolFromConfig, + getPrimaryModel +} from './cli-config-manager.js'; + +// Define Zod schema for validation +const ParamsSchema = z.object({ + tool: z.enum(['gemini', 'qwen', 'codex']), + prompt: z.string().min(1, 'Prompt is required'), + mode: z.enum(['analysis', 'write', 'auto']).default('analysis'), + format: z.enum(['plain', 'yaml', 'json']).default('plain'), // Multi-turn prompt concatenation format + model: z.string().optional(), + cd: z.string().optional(), + includeDirs: z.string().optional(), + timeout: z.number().default(0), // 0 = no internal timeout, controlled by external caller (e.g., bash timeout) + resume: z.union([z.boolean(), z.string()]).optional(), // true = last, string = single ID or comma-separated IDs + id: z.string().optional(), // Custom execution ID (e.g., IMPL-001-step1) + noNative: z.boolean().optional(), // Force prompt concatenation 
instead of native resume + category: z.enum(['user', 'internal', 'insight']).default('user'), // Execution category for tracking + parentExecutionId: z.string().optional(), // Parent execution ID for fork/retry scenarios + stream: z.boolean().default(false), // false = cache full output (default), true = stream output via callback +}); + +type Params = z.infer; + +type NonEmptyArray = [T, ...T[]]; + +function assertNonEmptyArray(items: T[], message: string): asserts items is NonEmptyArray { + if (items.length === 0) { + throw new Error(message); + } +} + +/** + * Execute CLI tool with streaming output + */ +async function executeCliTool( + params: Record, + onOutput?: ((data: { type: string; data: string }) => void) | null +): Promise { + const parsed = ParamsSchema.safeParse(params); + if (!parsed.success) { + throw new Error(`Invalid params: ${parsed.error.message}`); + } + + const { tool, prompt, mode, format, model, cd, includeDirs, timeout, resume, id: customId, noNative, category, parentExecutionId } = parsed.data; + + // Validate and determine working directory early (needed for conversation lookup) + let workingDir: string; + if (cd) { + const validation = validatePath(cd, { mustExist: true }); + if (!validation.valid) { + throw new Error(`Invalid working directory (--cd): ${validation.error}. Path: ${cd}`); + } + workingDir = validation.path!; + } else { + workingDir = process.cwd(); + } + ensureHistoryDir(workingDir); // Ensure history directory exists + + // NEW: Check if model is a custom LiteLLM endpoint ID + if (model) { + const endpoint = findEndpointById(workingDir, model); + if (endpoint) { + // Route to LiteLLM executor + if (onOutput) { + onOutput({ type: 'stderr', data: `[Routing to LiteLLM endpoint: ${model}]\n` }); + } + + const result = await executeLiteLLMEndpoint({ + prompt, + endpointId: model, + baseDir: workingDir, + cwd: cd, + includeDirs: includeDirs ? 
includeDirs.split(',').map(d => d.trim()) : undefined, + enableCache: true, + onOutput: onOutput || undefined, + }); + + // Convert LiteLLM result to ExecutionOutput format + const startTime = Date.now(); + const endTime = Date.now(); + const duration = endTime - startTime; + + const execution: ExecutionRecord = { + id: customId || `${Date.now()}-litellm`, + timestamp: new Date(startTime).toISOString(), + tool: 'litellm', + model: result.model, + mode, + prompt, + status: result.success ? 'success' : 'error', + exit_code: result.success ? 0 : 1, + duration_ms: duration, + output: { + stdout: result.output, + stderr: result.error || '', + truncated: false, + }, + }; + + const conversation = convertToConversation(execution); + + // Try to save to history + try { + saveConversation(workingDir, conversation); + } catch (err) { + console.error('[CLI Executor] Failed to save LiteLLM history:', (err as Error).message); + } + + return { + success: result.success, + execution, + conversation, + stdout: result.output, + stderr: result.error || '', + }; + } + } + + // Get SQLite store for native session lookup + const store = await getSqliteStore(workingDir); + + // Determine conversation ID and load existing conversation + // Logic: + // - If --resume (multiple IDs): merge conversations + // - With --id: create new merged conversation + // - Without --id: append to ALL source conversations + // - If --resume AND --id : fork - read context from resume ID, create new conversation with newId + // - If --id provided (no resume): use that ID (create new or append) + // - If --resume without --id: use resume ID (append to existing) + // - No params: create new with auto-generated ID + let conversationId: string; + let existingConversation: ConversationRecord | null = null; + let contextConversation: ConversationRecord | null = null; // For fork scenario + let mergeResult: MergeResult | null = null; // For merge scenario + let sourceConversations: ConversationRecord[] = []; // All 
source conversations for merge + + // Parse resume IDs (can be comma-separated for merge) + const resumeIds: string[] = resume + ? (typeof resume === 'string' ? resume.split(',').map(id => id.trim()).filter(Boolean) : []) + : []; + const isMerge = resumeIds.length > 1; + const resumeId = resumeIds.length === 1 ? resumeIds[0] : null; + + if (isMerge) { + // Merge scenario: multiple resume IDs + sourceConversations = resumeIds + .map(id => loadConversation(workingDir, id)) + .filter((c): c is ConversationRecord => c !== null); + + // Guard against empty merge sources before accessing sourceConversations[0]. + assertNonEmptyArray( + sourceConversations, + `No valid conversations found for merge: ${resumeIds.join(', ')}` + ); + + mergeResult = mergeConversations(sourceConversations); + debugLog('MERGE', 'Merged conversations', { + sourceConversationCount: sourceConversations.length, + resumeIds + }); + + if (customId) { + // Create new merged conversation with custom ID + conversationId = customId; + existingConversation = loadConversation(workingDir, customId); + } else { + // Will append to ALL source conversations (handled in save logic) + // Use first source conversation ID as primary + conversationId = sourceConversations[0].id; + existingConversation = sourceConversations[0]; + } + } else if (customId && resumeId) { + // Fork: read context from resume ID, but create new conversation with custom ID + conversationId = customId; + contextConversation = loadConversation(workingDir, resumeId); + existingConversation = loadConversation(workingDir, customId); + } else if (customId) { + // Use custom ID - may be new or existing + conversationId = customId; + existingConversation = loadConversation(workingDir, customId); + } else if (resumeId) { + // Resume single ID without new ID - append to existing conversation + conversationId = resumeId; + existingConversation = loadConversation(workingDir, resumeId); + } else if (resume) { + // resume=true: get last conversation 
for this tool + const history = getExecutionHistory(workingDir, { limit: 1, tool }); + if (history.executions.length > 0) { + conversationId = history.executions[0].id; + existingConversation = loadConversation(workingDir, conversationId); + } else { + // No previous conversation, create new + conversationId = `${Date.now()}-${tool}`; + } + } else { + // New conversation with auto-generated ID + conversationId = `${Date.now()}-${tool}`; + } + + // Determine resume strategy (native vs prompt-concat vs hybrid) + let resumeDecision: ResumeDecision | null = null; + let nativeResumeConfig: NativeResumeConfig | undefined; + + // resume=true (latest) - use native latest if supported + if (resume === true && !noNative && supportsNativeResume(tool)) { + resumeDecision = { + strategy: 'native', + isLatest: true, + primaryConversationId: conversationId + }; + } + // Use strategy engine for complex scenarios + else if (resumeIds.length > 0 && !noNative) { + resumeDecision = determineResumeStrategy({ + tool, + resumeIds, + customId, + forcePromptConcat: noNative, + getNativeSessionId: (ccwId) => store.getNativeSessionId(ccwId), + getConversation: (ccwId) => loadConversation(workingDir, ccwId), + getConversationTool: (ccwId) => { + const conv = loadConversation(workingDir, ccwId); + return conv?.tool || null; + } + }); + } + + // Configure native resume if strategy decided to use it + if (resumeDecision && (resumeDecision.strategy === 'native' || resumeDecision.strategy === 'hybrid')) { + nativeResumeConfig = { + enabled: true, + sessionId: resumeDecision.nativeSessionId, + isLatest: resumeDecision.isLatest + }; + } + + // Build final prompt with conversation context + // For native: minimal prompt (native tool handles context) + // For hybrid: context prefix from other conversations + new prompt + // For prompt-concat: full multi-turn prompt + let finalPrompt = prompt; + + if (resumeDecision?.strategy === 'native') { + // Native mode: just use the new prompt, tool handles 
context + finalPrompt = prompt; + } else if (resumeDecision?.strategy === 'hybrid' && resumeDecision.contextTurns?.length) { + // Hybrid mode: add context prefix from other conversations + const contextPrefix = buildContextPrefix(resumeDecision.contextTurns, format); + finalPrompt = contextPrefix + prompt; + } else if (mergeResult && mergeResult.mergedTurns.length > 0) { + // Full merge: use merged prompt + finalPrompt = buildMergedPrompt(mergeResult, prompt, format); + } else { + // Standard prompt-concat + const conversationForContext = contextConversation || existingConversation; + if (conversationForContext && conversationForContext.turns.length > 0) { + finalPrompt = buildMultiTurnPrompt(conversationForContext, prompt, format); + } + } + + // Check tool availability + const toolStatus = await checkToolAvailability(tool); + if (!toolStatus.available) { + throw new Error(`CLI tool not available: ${tool}. Please ensure it is installed and in PATH.`); + } + + // Log resume mode for debugging + if (resumeDecision) { + const modeDesc = getResumeModeDescription(resumeDecision); + if (onOutput) { + onOutput({ type: 'stderr', data: `[Resume mode: ${modeDesc}]\n` }); + } + } + + // Use configured primary model if no explicit model provided + const effectiveModel = model || getPrimaryModel(workingDir, tool); + + // Build command + const { command, args, useStdin } = buildCommand({ + tool, + prompt: finalPrompt, + mode, + model: effectiveModel, + dir: cd, + include: includeDirs, + nativeResume: nativeResumeConfig + }); + + const startTime = Date.now(); + + debugLog('EXEC', `Starting CLI execution`, { + tool, + mode, + workingDir, + conversationId, + promptLength: finalPrompt.length, + hasResume: !!resume, + hasCustomId: !!customId + }); + + return new Promise((resolve, reject) => { + // Windows requires shell: true for npm global commands (.cmd files) + // Unix-like systems can use shell: false for direct execution + const isWindows = process.platform === 'win32'; + + // 
When using cmd.exe via `shell: true`, escape args to prevent metacharacter injection. + const commandToSpawn = isWindows ? escapeWindowsArg(command) : command; + const argsToSpawn = isWindows ? args.map(escapeWindowsArg) : args; + + debugLog('SPAWN', `Spawning process`, { + command, + args, + cwd: workingDir, + shell: isWindows, + useStdin, + platform: process.platform, + fullCommand: `${command} ${args.join(' ')}`, + ...(isWindows ? { escapedCommand: commandToSpawn, escapedArgs: argsToSpawn, escapedFullCommand: `${commandToSpawn} ${argsToSpawn.join(' ')}` } : {}) + }); + + const child = spawn(commandToSpawn, argsToSpawn, { + cwd: workingDir, + shell: isWindows, // Enable shell on Windows for .cmd files + stdio: [useStdin ? 'pipe' : 'ignore', 'pipe', 'pipe'] + }); + + // Track current child process for cleanup on interruption + currentChildProcess = child; + + debugLog('SPAWN', `Process spawned`, { pid: child.pid }); + + // Write prompt to stdin if using stdin mode (for gemini/qwen) + if (useStdin && child.stdin) { + debugLog('STDIN', `Writing prompt to stdin (${finalPrompt.length} bytes)`); + child.stdin.write(finalPrompt); + child.stdin.end(); + } + + let stdout = ''; + let stderr = ''; + let timedOut = false; + + // Handle stdout + child.stdout!.on('data', (data) => { + const text = data.toString(); + stdout += text; + if (onOutput) { + onOutput({ type: 'stdout', data: text }); + } + }); + + // Handle stderr + child.stderr!.on('data', (data) => { + const text = data.toString(); + stderr += text; + if (onOutput) { + onOutput({ type: 'stderr', data: text }); + } + }); + + // Handle completion + child.on('close', async (code) => { + if (killTimeout && killTimeoutProcess === child) { + clearTimeout(killTimeout); + killTimeout = null; + killTimeoutProcess = null; + } + + // Clear current child process reference + currentChildProcess = null; + + const endTime = Date.now(); + const duration = endTime - startTime; + + debugLog('CLOSE', `Process closed`, { + exitCode: 
code, + duration: `${duration}ms`, + timedOut, + stdoutLength: stdout.length, + stderrLength: stderr.length + }); + + // Determine status - prioritize output content over exit code + let status: 'success' | 'error' | 'timeout' = 'success'; + if (timedOut) { + status = 'timeout'; + debugLog('STATUS', `Execution timed out after ${duration}ms`); + } else if (code !== 0) { + // Non-zero exit code doesn't always mean failure + // Check if there's valid output (AI response) - treat as success + const hasValidOutput = stdout.trim().length > 0; + const hasFatalError = stderr.includes('FATAL') || + stderr.includes('Authentication failed') || + stderr.includes('API key') || + stderr.includes('rate limit exceeded'); + + debugLog('STATUS', `Non-zero exit code analysis`, { + exitCode: code, + hasValidOutput, + hasFatalError, + stderrPreview: stderr.substring(0, 500) + }); + + if (hasValidOutput && !hasFatalError) { + // Has output and no fatal errors - treat as success despite exit code + status = 'success'; + debugLog('STATUS', `Treating as success (has valid output, no fatal errors)`); + } else { + status = 'error'; + errorLog('EXEC', `CLI execution failed`, undefined, { + exitCode: code, + tool, + command, + args, + workingDir, + stderrFull: stderr, + stdoutPreview: stdout.substring(0, 200) + }); + } + } else { + debugLog('STATUS', `Execution successful (exit code 0)`); + } + + // Create new turn - cache full output when not streaming (default) + const shouldCache = !parsed.data.stream; + const newTurnOutput = { + stdout: stdout.substring(0, 10240), // Truncate preview to 10KB + stderr: stderr.substring(0, 2048), // Truncate preview to 2KB + truncated: stdout.length > 10240 || stderr.length > 2048, + cached: shouldCache, + stdout_full: shouldCache ? stdout : undefined, + stderr_full: shouldCache ? stderr : undefined + }; + + // Determine base turn number for merge scenarios + const baseTurnNumber = isMerge && mergeResult + ? 
mergeResult.mergedTurns.length + 1 + : (existingConversation ? existingConversation.turns.length + 1 : 1); + + const newTurn: ConversationTurn = { + turn: baseTurnNumber, + timestamp: new Date(startTime).toISOString(), + prompt, + duration_ms: duration, + status, + exit_code: code, + output: newTurnOutput + }; + + // Create or update conversation record + let conversation: ConversationRecord; + + if (isMerge && mergeResult && !customId) { + // Merge without --id: append to ALL source conversations + // Save new turn to each source conversation + const savedConversations: ConversationRecord[] = []; + for (const srcConv of sourceConversations) { + const turnForSrc: ConversationTurn = { + ...newTurn, + turn: srcConv.turns.length + 1 // Use each conversation's turn count + }; + const updatedConv: ConversationRecord = { + ...srcConv, + updated_at: new Date().toISOString(), + total_duration_ms: srcConv.total_duration_ms + duration, + turn_count: srcConv.turns.length + 1, + latest_status: status, + turns: [...srcConv.turns, turnForSrc] + }; + savedConversations.push(updatedConv); + } + // Use first conversation as primary + conversation = savedConversations[0]; + // Save all source conversations + try { + for (const conv of savedConversations) { + saveConversation(workingDir, conv); + } + } catch (err) { + console.error('[CLI Executor] Failed to save merged histories:', (err as Error).message); + } + } else if (isMerge && mergeResult && customId) { + // Merge with --id: create new conversation with merged turns + new turn + // Convert merged turns to regular turns (without source_id) + const mergedTurns: ConversationTurn[] = mergeResult.mergedTurns.map((mt, idx) => ({ + turn: idx + 1, + timestamp: mt.timestamp, + prompt: mt.prompt, + duration_ms: mt.duration_ms, + status: mt.status, + exit_code: mt.exit_code, + output: mt.output + })); + + conversation = existingConversation + ? 
{ + ...existingConversation, + updated_at: new Date().toISOString(), + total_duration_ms: existingConversation.total_duration_ms + duration, + turn_count: existingConversation.turns.length + 1, + latest_status: status, + turns: [...existingConversation.turns, newTurn] + } + : { + id: conversationId, + created_at: new Date(startTime).toISOString(), + updated_at: new Date().toISOString(), + tool, + model: model || 'default', + mode, + category, + total_duration_ms: mergeResult.totalDuration + duration, + turn_count: mergedTurns.length + 1, + latest_status: status, + turns: [...mergedTurns, newTurn] + }; + // Save merged conversation + try { + saveConversation(workingDir, conversation); + } catch (err) { + console.error('[CLI Executor] Failed to save merged conversation:', (err as Error).message); + } + } else { + // Normal scenario: single conversation + conversation = existingConversation + ? { + ...existingConversation, + updated_at: new Date().toISOString(), + total_duration_ms: existingConversation.total_duration_ms + duration, + turn_count: existingConversation.turns.length + 1, + latest_status: status, + turns: [...existingConversation.turns, newTurn] + } + : { + id: conversationId, + created_at: new Date(startTime).toISOString(), + updated_at: new Date().toISOString(), + tool, + model: model || 'default', + mode, + category, + total_duration_ms: duration, + turn_count: 1, + latest_status: status, + turns: [newTurn], + parent_execution_id: parentExecutionId + }; + // Try to save conversation to history + try { + saveConversation(workingDir, conversation); + } catch (err) { + // Non-fatal: continue even if history save fails + console.error('[CLI Executor] Failed to save history:', (err as Error).message); + } + } + + // Track native session after execution (awaited to prevent process hang) + // Pass prompt for precise matching in parallel execution scenarios + try { + const nativeSession = await trackNewSession(tool, new Date(startTime), workingDir, prompt); + 
if (nativeSession) { + // Save native session mapping + try { + store.saveNativeSessionMapping({ + ccw_id: conversationId, + tool, + native_session_id: nativeSession.sessionId, + native_session_path: nativeSession.filePath, + project_hash: nativeSession.projectHash, + created_at: new Date().toISOString() + }); + } catch (err) { + console.error('[CLI Executor] Failed to save native session mapping:', (err as Error).message); + } + } + } catch (err) { + console.error('[CLI Executor] Failed to track native session:', (err as Error).message); + } + + // Create legacy execution record for backward compatibility + const execution: ExecutionRecord = { + id: conversationId, + timestamp: new Date(startTime).toISOString(), + tool, + model: model || 'default', + mode, + prompt, + status, + exit_code: code, + duration_ms: duration, + output: newTurnOutput + }; + + resolve({ + success: status === 'success', + execution, + conversation, + stdout, + stderr + }); + }); + + // Handle errors + child.on('error', (error) => { + errorLog('SPAWN', `Failed to spawn process`, error, { + tool, + command, + args, + workingDir, + fullCommand: `${command} ${args.join(' ')}`, + platform: process.platform, + path: process.env.PATH?.split(process.platform === 'win32' ? ';' : ':').slice(0, 10).join('\n ') + '...' 
+ }); + reject(new Error(`Failed to spawn ${tool}: ${error.message}\n Command: ${command} ${args.join(' ')}\n Working Dir: ${workingDir}`)); + }); + + // Timeout handling (timeout=0 disables internal timeout, controlled by external caller) + let timeoutId: NodeJS.Timeout | null = null; + if (timeout > 0) { + timeoutId = setTimeout(() => { + timedOut = true; + child.kill('SIGTERM'); + setTimeout(() => { + if (!child.killed) { + child.kill('SIGKILL'); + } + }, 5000); + }, timeout); + } + + child.on('close', () => { + if (timeoutId) { + clearTimeout(timeoutId); + } + }); + }); +} + +// Tool schema for MCP +export const schema: ToolSchema = { + name: 'cli_executor', + description: `Execute external CLI tools (gemini/qwen/codex) with unified interface. +Modes: +- analysis: Read-only operations (default) +- write: File modifications allowed +- auto: Full autonomous operations (codex only)`, + inputSchema: { + type: 'object', + properties: { + tool: { + type: 'string', + enum: ['gemini', 'qwen', 'codex'], + description: 'CLI tool to execute' + }, + prompt: { + type: 'string', + description: 'Prompt to send to the CLI tool' + }, + mode: { + type: 'string', + enum: ['analysis', 'write', 'auto'], + description: 'Execution mode (default: analysis)', + default: 'analysis' + }, + model: { + type: 'string', + description: 'Model override (tool-specific)' + }, + cd: { + type: 'string', + description: 'Working directory for execution (-C for codex)' + }, + includeDirs: { + type: 'string', + description: 'Additional directories (comma-separated). 
Maps to --include-directories for gemini/qwen, --add-dir for codex' + }, + timeout: { + type: 'number', + description: 'Timeout in milliseconds (default: 0 = disabled, controlled by external caller)', + default: 0 + } + }, + required: ['tool', 'prompt'] + } +}; + +// Handler function +export async function handler(params: Record): Promise> { + try { + const result = await executeCliTool(params); + return { + success: result.success, + result + }; + } catch (error) { + return { + success: false, + error: `CLI execution failed: ${(error as Error).message}` + }; + } +} + +export { + batchDeleteExecutionsAsync, + deleteExecution, + deleteExecutionAsync, + getConversationDetail, + getConversationDetailWithNativeInfo, + getExecutionDetail, + getExecutionHistory, + getExecutionHistoryAsync +} from './cli-executor-state.js'; + +/** + * Get status of all CLI tools + */ +export async function getCliToolsStatus(): Promise> { + const tools = ['gemini', 'qwen', 'codex', 'claude']; + const results: Record = {}; + + await Promise.all(tools.map(async (tool) => { + results[tool] = await checkToolAvailability(tool); + })); + + return results; +} + +// CLI tool package mapping +const CLI_TOOL_PACKAGES: Record = { + gemini: '@google/gemini-cli', + qwen: '@qwen-code/qwen-code', + codex: '@openai/codex', + claude: '@anthropic-ai/claude-code' +}; + +// Disabled tools storage (in-memory fallback, main storage is in cli-config.json) +const disabledTools = new Set(); + +// Default working directory for config operations +let configBaseDir = process.cwd(); + +/** + * Set the base directory for config operations + */ +export function setConfigBaseDir(dir: string): void { + configBaseDir = dir; +} + +/** + * Install a CLI tool via npm + */ +export async function installCliTool(tool: string): Promise<{ success: boolean; error?: string }> { + const packageName = CLI_TOOL_PACKAGES[tool]; + if (!packageName) { + return { success: false, error: `Unknown tool: ${tool}` }; + } + + return new 
Promise((resolve) => { + const child = spawn('npm', ['install', '-g', packageName], { + shell: true, + stdio: ['ignore', 'pipe', 'pipe'] + }); + + let stderr = ''; + child.stderr?.on('data', (data) => { stderr += data.toString(); }); + + child.on('close', (code) => { + // Clear cache to force re-check + clearToolCache(); + + if (code === 0) { + resolve({ success: true }); + } else { + resolve({ success: false, error: stderr || `npm install failed with code ${code}` }); + } + }); + + child.on('error', (err) => { + resolve({ success: false, error: err.message }); + }); + + // Timeout after 2 minutes + setTimeout(() => { + child.kill(); + resolve({ success: false, error: 'Installation timed out' }); + }, 120000); + }); +} + +/** + * Uninstall a CLI tool via npm + */ +export async function uninstallCliTool(tool: string): Promise<{ success: boolean; error?: string }> { + const packageName = CLI_TOOL_PACKAGES[tool]; + if (!packageName) { + return { success: false, error: `Unknown tool: ${tool}` }; + } + + return new Promise((resolve) => { + const child = spawn('npm', ['uninstall', '-g', packageName], { + shell: true, + stdio: ['ignore', 'pipe', 'pipe'] + }); + + let stderr = ''; + child.stderr?.on('data', (data) => { stderr += data.toString(); }); + + child.on('close', (code) => { + // Clear cache to force re-check + clearToolCache(); + + if (code === 0) { + resolve({ success: true }); + } else { + resolve({ success: false, error: stderr || `npm uninstall failed with code ${code}` }); + } + }); + + child.on('error', (err) => { + resolve({ success: false, error: err.message }); + }); + + // Timeout after 1 minute + setTimeout(() => { + child.kill(); + resolve({ success: false, error: 'Uninstallation timed out' }); + }, 60000); + }); +} + +/** + * Enable a CLI tool (updates config file) + */ +export function enableCliTool(tool: string): { success: boolean } { + try { + enableToolFromConfig(configBaseDir, tool); + disabledTools.delete(tool); // Also update in-memory 
fallback + return { success: true }; + } catch (err) { + console.error('[cli-executor] Error enabling tool:', err); + disabledTools.delete(tool); // Fallback to in-memory + return { success: true }; + } +} + +/** + * Disable a CLI tool (updates config file) + */ +export function disableCliTool(tool: string): { success: boolean } { + try { + disableToolFromConfig(configBaseDir, tool); + disabledTools.add(tool); // Also update in-memory fallback + return { success: true }; + } catch (err) { + console.error('[cli-executor] Error disabling tool:', err); + disabledTools.add(tool); // Fallback to in-memory + return { success: true }; + } +} + +/** + * Check if a tool is enabled (reads from config file) + */ +export function isToolEnabled(tool: string): boolean { + try { + return isToolEnabledFromConfig(configBaseDir, tool); + } catch { + // Fallback to in-memory check + return !disabledTools.has(tool); + } +} + +/** + * Get full status of all CLI tools including enabled state + */ +export async function getCliToolsFullStatus(): Promise> { + const tools = Object.keys(CLI_TOOL_PACKAGES); + const results: Record = {}; + + await Promise.all(tools.map(async (tool) => { + const availability = await checkToolAvailability(tool); + results[tool] = { + available: availability.available, + enabled: isToolEnabled(tool), + path: availability.path, + packageName: CLI_TOOL_PACKAGES[tool] + }; + })); + + return results; +} + + +/** + * Build continuation prompt with previous conversation context (legacy) + */ +function buildContinuationPrompt(previous: ExecutionRecord, additionalPrompt?: string): string { + const parts: string[] = []; + + // Add previous conversation context + parts.push('=== PREVIOUS CONVERSATION ==='); + parts.push(''); + parts.push('USER PROMPT:'); + parts.push(previous.prompt); + parts.push(''); + parts.push('ASSISTANT RESPONSE:'); + parts.push(previous.output.stdout || '[No output recorded]'); + parts.push(''); + parts.push('=== CONTINUATION ==='); + 
parts.push(''); + + if (additionalPrompt) { + parts.push(additionalPrompt); + } else { + parts.push('Continue from where we left off. What should we do next?'); + } + + return parts.join('\n'); +} + +/** + * Get previous execution for resume + * @param baseDir - Working directory + * @param tool - Tool to filter by + * @param resume - true for last, or execution ID string + */ +function getPreviousExecution(baseDir: string, tool: string, resume: boolean | string): ExecutionRecord | null { + if (typeof resume === 'string') { + // Resume specific execution by ID + return getExecutionDetail(baseDir, resume); + } else if (resume === true) { + // Resume last execution for this tool + const history = getExecutionHistory(baseDir, { limit: 1, tool }); + if (history.executions.length === 0) { + return null; + } + return getExecutionDetail(baseDir, history.executions[0].id); + } + return null; +} + +/** + * Latest execution + native session history functions are re-exported from state. + */ +export { + getEnrichedConversation, + getFormattedNativeConversation, + getHistoryWithNativeInfo, + getLatestExecution, + getNativeConversationPairs, + getNativeSessionContent +} from './cli-executor-state.js'; + +// Export types +export type { ExecutionCategory, ConversationRecord, ConversationTurn, ExecutionRecord } from './cli-executor-state.js'; +export type { PromptFormat, ConcatOptions } from './cli-prompt-builder.js'; + +// Export utility functions and tool definition for backward compatibility +export { executeCliTool, checkToolAvailability, clearToolCache }; + +// Export prompt concatenation utilities +export { PromptConcatenator, createPromptConcatenator, buildPrompt, buildMultiTurnPrompt } from './cli-prompt-builder.js'; + +// Note: Async storage functions (getExecutionHistoryAsync, deleteExecutionAsync, +// batchDeleteExecutionsAsync) are exported at declaration site - SQLite storage only + +// Export tool definition (for legacy imports) - This allows direct calls to execute 
// Export tool definition (for legacy imports) - This allows direct calls to execute with onOutput
export const cliExecutorTool = {
  schema,
  // executeCliTool is used directly (rather than the MCP `handler` wrapper)
  // because it accepts the onOutput streaming callback.
  execute: executeCliTool // Use executeCliTool directly which supports onOutput callback
};
Call an async function first.'); + } + return sqliteStoreModule.getHistoryStore(baseDir); +} + +// Execution category types +export type ExecutionCategory = 'user' | 'internal' | 'insight'; + +// Single turn in a conversation +export interface ConversationTurn { + turn: number; + timestamp: string; + prompt: string; + duration_ms: number; + status: 'success' | 'error' | 'timeout'; + exit_code: number | null; + output: { + stdout: string; + stderr: string; + truncated: boolean; + }; +} + +// Multi-turn conversation record +export interface ConversationRecord { + id: string; + created_at: string; + updated_at: string; + tool: string; + model: string; + mode: string; + category: ExecutionCategory; // user | internal | insight + total_duration_ms: number; + turn_count: number; + latest_status: 'success' | 'error' | 'timeout'; + turns: ConversationTurn[]; + parent_execution_id?: string; // For fork/retry scenarios +} + +// Legacy single execution record (for backward compatibility) +export interface ExecutionRecord { + id: string; + timestamp: string; + tool: string; + model: string; + mode: string; + prompt: string; + status: 'success' | 'error' | 'timeout'; + exit_code: number | null; + duration_ms: number; + output: { + stdout: string; + stderr: string; + truncated: boolean; + }; +} + +interface HistoryIndex { + version: number; + total_executions: number; + executions: { + id: string; + timestamp: string; // created_at for conversations + updated_at?: string; // last update time + tool: string; + status: string; + duration_ms: number; + turn_count?: number; // number of turns in conversation + prompt_preview: string; + }[]; +} + +export interface ExecutionOutput { + success: boolean; + execution: ExecutionRecord; + conversation: ConversationRecord; // Full conversation record + stdout: string; + stderr: string; +} + +/** + * Ensure history directory exists (uses centralized storage) + */ +export function ensureHistoryDir(baseDir: string): string { + const paths = 
StoragePaths.project(baseDir); + ensureStorageDir(paths.cliHistory); + return paths.cliHistory; +} + +/** + * Save conversation to SQLite + * @param baseDir - Project base directory (NOT historyDir) + */ +async function saveConversationAsync(baseDir: string, conversation: ConversationRecord): Promise { + const store = await getSqliteStore(baseDir); + store.saveConversation(conversation); +} + +/** + * Sync wrapper for saveConversation (uses cached SQLite module) + * @param baseDir - Project base directory (NOT historyDir) + */ +export function saveConversation(baseDir: string, conversation: ConversationRecord): void { + try { + const store = getSqliteStoreSync(baseDir); + store.saveConversation(conversation); + } catch { + // If sync not available, queue for async save + saveConversationAsync(baseDir, conversation).catch(err => { + console.error('[CLI Executor] Failed to save conversation:', err.message); + }); + } +} + +/** + * Load existing conversation by ID from SQLite + * @param baseDir - Project base directory (NOT historyDir) + */ +async function loadConversationAsync(baseDir: string, conversationId: string): Promise { + const store = await getSqliteStore(baseDir); + return store.getConversation(conversationId); +} + +/** + * Sync wrapper for loadConversation (uses cached SQLite module) + * @param baseDir - Project base directory (NOT historyDir) + */ +export function loadConversation(baseDir: string, conversationId: string): ConversationRecord | null { + try { + const store = getSqliteStoreSync(baseDir); + return store.getConversation(conversationId); + } catch { + // SQLite not initialized yet, return null + return null; + } +} + +/** + * Convert legacy ExecutionRecord to ConversationRecord + */ +export function convertToConversation(record: ExecutionRecord): ConversationRecord { + return { + id: record.id, + created_at: record.timestamp, + updated_at: record.timestamp, + tool: record.tool, + model: record.model, + mode: record.mode, + category: 'user', // 
Legacy records default to user category + total_duration_ms: record.duration_ms, + turn_count: 1, + latest_status: record.status, + turns: [{ + turn: 1, + timestamp: record.timestamp, + prompt: record.prompt, + duration_ms: record.duration_ms, + status: record.status, + exit_code: record.exit_code, + output: record.output + }] + }; +} + +/** + * Get execution history from SQLite (centralized storage) + */ +export async function getExecutionHistoryAsync(baseDir: string, options: { + limit?: number; + tool?: string | null; + status?: string | null; + category?: ExecutionCategory | null; + search?: string | null; + recursive?: boolean; +} = {}): Promise<{ + total: number; + count: number; + executions: (HistoryIndex['executions'][0] & { sourceDir?: string })[]; +}> { + const { limit = 50, tool = null, status = null, category = null, search = null, recursive = false } = options; + + // Recursive mode: aggregate data from parent and all child projects + if (recursive) { + const { scanChildProjectsAsync } = await import('../config/storage-paths.js'); + const childProjects = await scanChildProjectsAsync(baseDir); + + let allExecutions: (HistoryIndex['executions'][0] & { sourceDir?: string })[] = []; + let totalCount = 0; + + // Query parent project - apply limit at source to reduce memory footprint + try { + const parentStore = await getSqliteStore(baseDir); + const parentResult = parentStore.getHistory({ limit, tool, status, category, search }); + totalCount += parentResult.total; + + for (const exec of parentResult.executions) { + allExecutions.push({ ...exec, sourceDir: baseDir }); + } + } catch (error) { + if (process.env.DEBUG) { + console.error(`[CLI History] Failed to query parent project ${baseDir}:`, error); + } + } + + // Query all child projects - apply limit to each child + for (const child of childProjects) { + try { + const childStore = await getSqliteStore(child.projectPath); + const childResult = childStore.getHistory({ limit, tool, status, category, 
search }); + totalCount += childResult.total; + + for (const exec of childResult.executions) { + allExecutions.push({ + ...exec, + sourceDir: child.relativePath // Show relative path for clarity + }); + } + } catch (error) { + if (process.env.DEBUG) { + console.error(`[CLI History] Failed to query child project ${child.projectPath}:`, error); + } + } + } + + // Sort by timestamp (newest first) and apply limit + allExecutions.sort((a, b) => Number(b.timestamp) - Number(a.timestamp)); + const limitedExecutions = allExecutions.slice(0, limit); + + return { + total: totalCount, + count: limitedExecutions.length, + executions: limitedExecutions + }; + } + + // Non-recursive mode: only query current project + const store = await getSqliteStore(baseDir); + return store.getHistory({ limit, tool, status, category, search }); +} + +/** + * Get execution history (sync version - uses cached SQLite module) + */ +export function getExecutionHistory(baseDir: string, options: { + limit?: number; + tool?: string | null; + status?: string | null; + recursive?: boolean; +} = {}): { + total: number; + count: number; + executions: (HistoryIndex['executions'][0] & { sourceDir?: string })[]; +} { + const { limit = 50, tool = null, status = null, recursive = false } = options; + + try { + if (recursive) { + const { scanChildProjects } = require('../config/storage-paths.js'); + const childProjects = scanChildProjects(baseDir); + + let allExecutions: (HistoryIndex['executions'][0] & { sourceDir?: string })[] = []; + let totalCount = 0; + + // Query parent project - apply limit at source + try { + const parentStore = getSqliteStoreSync(baseDir); + const parentResult = parentStore.getHistory({ limit, tool, status }); + totalCount += parentResult.total; + + for (const exec of parentResult.executions) { + allExecutions.push({ ...exec, sourceDir: baseDir }); + } + } catch (error) { + if (process.env.DEBUG) { + console.error(`[CLI History] Failed to query parent project ${baseDir}:`, error); + } 
+ } + + // Query all child projects - apply limit to each child + for (const child of childProjects) { + try { + const childStore = getSqliteStoreSync(child.projectPath); + const childResult = childStore.getHistory({ limit, tool, status }); + totalCount += childResult.total; + + for (const exec of childResult.executions) { + allExecutions.push({ + ...exec, + sourceDir: child.relativePath // Show relative path for clarity + }); + } + } catch (error) { + if (process.env.DEBUG) { + console.error(`[CLI History] Failed to query child project ${child.projectPath}:`, error); + } + } + } + + // Sort by timestamp (newest first) and apply limit + allExecutions.sort((a, b) => Number(b.timestamp) - Number(a.timestamp)); + const limitedExecutions = allExecutions.slice(0, limit); + + return { + total: totalCount, + count: limitedExecutions.length, + executions: limitedExecutions + }; + } + + const store = getSqliteStoreSync(baseDir); + return store.getHistory({ limit, tool, status }); + } catch { + // SQLite not initialized yet, return empty + return { total: 0, count: 0, executions: [] }; + } +} + +/** + * Get conversation detail by ID + */ +export function getConversationDetail(baseDir: string, conversationId: string): ConversationRecord | null { + // Pass baseDir directly - loadConversation will resolve the correct storage path + return loadConversation(baseDir, conversationId); +} + +/** + * Get conversation detail with native session mapping info + */ +export function getConversationDetailWithNativeInfo(baseDir: string, conversationId: string) { + try { + const store = getSqliteStoreSync(baseDir); + return store.getConversationWithNativeInfo(conversationId); + } catch { + // SQLite not initialized, return null + return null; + } +} + +/** + * Get execution detail by ID (legacy, returns ExecutionRecord for backward compatibility) + */ +export function getExecutionDetail(baseDir: string, executionId: string): ExecutionRecord | null { + const conversation = 
getConversationDetail(baseDir, executionId); + if (!conversation) return null; + + // Convert to legacy ExecutionRecord format (using latest turn) + const latestTurn = conversation.turns[conversation.turns.length - 1]; + return { + id: conversation.id, + timestamp: conversation.created_at, + tool: conversation.tool, + model: conversation.model, + mode: conversation.mode, + prompt: latestTurn.prompt, + status: conversation.latest_status, + exit_code: latestTurn.exit_code, + duration_ms: conversation.total_duration_ms, + output: latestTurn.output + }; +} + +/** + * Delete execution by ID (async version) + */ +export async function deleteExecutionAsync(baseDir: string, executionId: string): Promise<{ success: boolean; error?: string }> { + const store = await getSqliteStore(baseDir); + return store.deleteConversation(executionId); +} + +/** + * Delete execution by ID (sync version - uses cached SQLite module) + */ +export function deleteExecution(baseDir: string, executionId: string): { success: boolean; error?: string } { + try { + const store = getSqliteStoreSync(baseDir); + return store.deleteConversation(executionId); + } catch { + return { success: false, error: 'SQLite store not initialized' }; + } +} + +/** + * Batch delete executions (async) + */ +export async function batchDeleteExecutionsAsync(baseDir: string, ids: string[]): Promise<{ + success: boolean; + deleted: number; + total: number; + errors?: string[]; +}> { + const store = await getSqliteStore(baseDir); + const result = store.batchDelete(ids); + return { ...result, total: ids.length }; +} + +/** + * Get latest execution for a specific tool + */ +export function getLatestExecution(baseDir: string, tool?: string): ExecutionRecord | null { + const history = getExecutionHistory(baseDir, { limit: 1, tool: tool || null }); + if (history.executions.length === 0) { + return null; + } + return getExecutionDetail(baseDir, history.executions[0].id); +} + +// ========== Native Session Content Functions 
========== + +/** + * Get native session content by CCW ID + * Parses the native session file and returns full conversation data + */ +export async function getNativeSessionContent(baseDir: string, ccwId: string) { + const store = await getSqliteStore(baseDir); + return store.getNativeSessionContent(ccwId); +} + +/** + * Get formatted native conversation text + */ +export async function getFormattedNativeConversation(baseDir: string, ccwId: string, options?: { + includeThoughts?: boolean; + includeToolCalls?: boolean; + includeTokens?: boolean; + maxContentLength?: number; +}) { + const store = await getSqliteStore(baseDir); + return store.getFormattedNativeConversation(ccwId, options); +} + +/** + * Get conversation pairs from native session + */ +export async function getNativeConversationPairs(baseDir: string, ccwId: string) { + const store = await getSqliteStore(baseDir); + return store.getNativeConversationPairs(ccwId); +} + +/** + * Get enriched conversation (CCW + native session merged) + */ +export async function getEnrichedConversation(baseDir: string, ccwId: string) { + const store = await getSqliteStore(baseDir); + return store.getEnrichedConversation(ccwId); +} + +/** + * Get history with native session info + * Supports recursive querying of child projects + */ +export async function getHistoryWithNativeInfo(baseDir: string, options?: { + limit?: number; + offset?: number; + tool?: string | null; + status?: string | null; + category?: ExecutionCategory | null; + search?: string | null; + recursive?: boolean; +}) { + const { limit = 50, recursive = false, ...queryOptions } = options || {}; + + // Non-recursive mode: query single project + if (!recursive) { + const store = await getSqliteStore(baseDir); + return store.getHistoryWithNativeInfo({ limit, ...queryOptions }); + } + + // Recursive mode: aggregate data from parent and all child projects + const { scanChildProjectsAsync } = await import('../config/storage-paths.js'); + const childProjects = 
await scanChildProjectsAsync(baseDir); + + // Use the same type as store.getHistoryWithNativeInfo returns + type ExecutionWithNativeAndSource = HistoryIndexEntry & { + hasNativeSession: boolean; + nativeSessionId?: string; + nativeSessionPath?: string; + }; + + const allExecutions: ExecutionWithNativeAndSource[] = []; + let totalCount = 0; + + // Query parent project + try { + const parentStore = await getSqliteStore(baseDir); + const parentResult = parentStore.getHistoryWithNativeInfo({ limit, ...queryOptions }); + totalCount += parentResult.total; + + for (const exec of parentResult.executions) { + allExecutions.push({ ...exec, sourceDir: baseDir }); + } + } catch (error) { + if (process.env.DEBUG) { + console.error(`[CLI History] Failed to query parent project ${baseDir}:`, error); + } + } + + // Query all child projects + for (const child of childProjects) { + try { + const childStore = await getSqliteStore(child.projectPath); + const childResult = childStore.getHistoryWithNativeInfo({ limit, ...queryOptions }); + totalCount += childResult.total; + + for (const exec of childResult.executions) { + allExecutions.push({ ...exec, sourceDir: child.projectPath }); + } + } catch (error) { + if (process.env.DEBUG) { + console.error(`[CLI History] Failed to query child project ${child.projectPath}:`, error); + } + } + } + + // Sort by updated_at descending and apply limit + allExecutions.sort((a, b) => { + const timeA = a.updated_at ? new Date(a.updated_at).getTime() : new Date(a.timestamp).getTime(); + const timeB = b.updated_at ? 
new Date(b.updated_at).getTime() : new Date(b.timestamp).getTime(); + return timeB - timeA; + }); + const limitedExecutions = allExecutions.slice(0, limit); + + return { + total: totalCount, + count: limitedExecutions.length, + executions: limitedExecutions + }; +} diff --git a/ccw/src/tools/cli-executor-utils.ts b/ccw/src/tools/cli-executor-utils.ts new file mode 100644 index 00000000..2c93f9a9 --- /dev/null +++ b/ccw/src/tools/cli-executor-utils.ts @@ -0,0 +1,306 @@ +import { spawn } from 'child_process'; + +// Debug logging utility - check env at runtime for --debug flag support +export function isDebugEnabled(): boolean { + return process.env.DEBUG === 'true' || process.env.DEBUG === '1' || process.env.CCW_DEBUG === 'true'; +} + +export function debugLog(category: string, message: string, data?: Record): void { + if (!isDebugEnabled()) return; + const timestamp = new Date().toISOString(); + const prefix = `[${timestamp}] [CLI-DEBUG] [${category}]`; + if (data) { + console.error(`${prefix} ${message}`, JSON.stringify(data, null, 2)); + } else { + console.error(`${prefix} ${message}`); + } +} + +export function errorLog( + category: string, + message: string, + error?: Error | unknown, + context?: Record +): void { + const timestamp = new Date().toISOString(); + const prefix = `[${timestamp}] [CLI-ERROR] [${category}]`; + console.error(`${prefix} ${message}`); + if (error instanceof Error) { + console.error(`${prefix} Error: ${error.message}`); + if (isDebugEnabled() && error.stack) { + console.error(`${prefix} Stack: ${error.stack}`); + } + } else if (error) { + console.error(`${prefix} Error: ${String(error)}`); + } + if (context) { + console.error(`${prefix} Context:`, JSON.stringify(context, null, 2)); + } +} + +export interface ToolAvailability { + available: boolean; + path: string | null; +} + +// Tool availability cache with TTL +interface CachedToolAvailability { + result: ToolAvailability; + timestamp: number; +} + +// Cache storage: Map +const 
toolAvailabilityCache = new Map(); +const CACHE_TTL_MS = 5 * 60 * 1000; // 5 minutes + +function isCacheValid(cached: CachedToolAvailability): boolean { + return Date.now() - cached.timestamp < CACHE_TTL_MS; +} + +function clearExpiredCache(): void { + const now = Date.now(); + const entriesToDelete: string[] = []; + + toolAvailabilityCache.forEach((cached, tool) => { + if (now - cached.timestamp >= CACHE_TTL_MS) { + entriesToDelete.push(tool); + } + }); + + entriesToDelete.forEach((tool) => toolAvailabilityCache.delete(tool)); +} + +export function clearToolCache(): void { + toolAvailabilityCache.clear(); +} + +/** + * Check if a CLI tool is available (with caching) + */ +export async function checkToolAvailability(tool: string): Promise { + debugLog('TOOL_CHECK', `Checking availability for tool: ${tool}`); + + const cached = toolAvailabilityCache.get(tool); + if (cached && isCacheValid(cached)) { + debugLog('TOOL_CHECK', `Cache hit for ${tool}`, { available: cached.result.available, path: cached.result.path }); + return cached.result; + } + + clearExpiredCache(); + + return new Promise((resolve) => { + const isWindows = process.platform === 'win32'; + const command = isWindows ? 'where' : 'which'; + + debugLog('TOOL_CHECK', `Running ${command} ${tool}`, { platform: process.platform }); + + const child = spawn(command, [tool], { + shell: false, + stdio: ['ignore', 'pipe', 'pipe'], + }); + + let stdout = ''; + let stderr = ''; + child.stdout!.on('data', (data) => { + stdout += data.toString(); + }); + child.stderr?.on('data', (data) => { + stderr += data.toString(); + }); + + child.on('close', (code) => { + const result: ToolAvailability = code === 0 && stdout.trim() + ? 
{ available: true, path: stdout.trim().split('\n')[0] } + : { available: false, path: null }; + + if (result.available) { + debugLog('TOOL_CHECK', `Tool ${tool} found`, { path: result.path }); + toolAvailabilityCache.set(tool, { + result, + timestamp: Date.now(), + }); + } else { + debugLog('TOOL_CHECK', `Tool ${tool} not found`, { exitCode: code, stderr: stderr.trim() || '(empty)' }); + } + + resolve(result); + }); + + child.on('error', (error) => { + errorLog('TOOL_CHECK', `Failed to check tool availability: ${tool}`, error, { command, tool }); + resolve({ available: false, path: null }); + }); + + setTimeout(() => { + child.kill(); + debugLog('TOOL_CHECK', `Timeout checking tool ${tool} (5s)`); + resolve({ available: false, path: null }); + }, 5000); + }); +} + +// Native resume configuration +export interface NativeResumeConfig { + enabled: boolean; + sessionId?: string; // Native UUID + isLatest?: boolean; // Use latest/--last flag +} + +/** + * Build command arguments based on tool and options + */ +export function buildCommand(params: { + tool: string; + prompt: string; + mode: string; + model?: string; + dir?: string; + include?: string; + nativeResume?: NativeResumeConfig; +}): { command: string; args: string[]; useStdin: boolean } { + const { tool, prompt, mode = 'analysis', model, dir, include, nativeResume } = params; + + debugLog('BUILD_CMD', `Building command for tool: ${tool}`, { + mode, + model: model || '(default)', + dir: dir || '(cwd)', + include: include || '(none)', + nativeResume: nativeResume + ? 
{ enabled: nativeResume.enabled, isLatest: nativeResume.isLatest, sessionId: nativeResume.sessionId } + : '(none)', + promptLength: prompt.length, + }); + + let command = tool; + let args: string[] = []; + // Default to stdin for all tools to avoid escaping issues on Windows + let useStdin = true; + + switch (tool) { + case 'gemini': + if (nativeResume?.enabled) { + if (nativeResume.isLatest) { + args.push('-r', 'latest'); + } else if (nativeResume.sessionId) { + args.push('-r', nativeResume.sessionId); + } + } + if (model) { + args.push('-m', model); + } + if (mode === 'write') { + args.push('--approval-mode', 'yolo'); + } + if (include) { + args.push('--include-directories', include); + } + break; + + case 'qwen': + if (nativeResume?.enabled) { + if (nativeResume.isLatest) { + args.push('--continue'); + } else if (nativeResume.sessionId) { + args.push('--resume', nativeResume.sessionId); + } + } + if (model) { + args.push('-m', model); + } + if (mode === 'write') { + args.push('--approval-mode', 'yolo'); + } + if (include) { + args.push('--include-directories', include); + } + break; + + case 'codex': + useStdin = true; + if (nativeResume?.enabled) { + args.push('resume'); + if (nativeResume.isLatest) { + args.push('--last'); + } else if (nativeResume.sessionId) { + args.push(nativeResume.sessionId); + } + if (mode === 'write' || mode === 'auto') { + args.push('--dangerously-bypass-approvals-and-sandbox'); + } else { + args.push('--full-auto'); + } + if (model) { + args.push('-m', model); + } + if (include) { + const dirs = include.split(',').map((d) => d.trim()).filter((d) => d); + for (const addDir of dirs) { + args.push('--add-dir', addDir); + } + } + args.push('-'); + } else { + args.push('exec'); + if (mode === 'write' || mode === 'auto') { + args.push('--dangerously-bypass-approvals-and-sandbox'); + } else { + args.push('--full-auto'); + } + if (model) { + args.push('-m', model); + } + if (include) { + const dirs = include.split(',').map((d) => 
d.trim()).filter((d) => d); + for (const addDir of dirs) { + args.push('--add-dir', addDir); + } + } + args.push('-'); + } + break; + + case 'claude': + // Claude Code: claude -p "prompt" for non-interactive mode + args.push('-p'); // Print mode (non-interactive) + // Native resume: claude --resume or --continue + if (nativeResume?.enabled) { + if (nativeResume.isLatest) { + args.push('--continue'); + } else if (nativeResume.sessionId) { + args.push('--resume', nativeResume.sessionId); + } + } + if (model) { + args.push('--model', model); + } + // Permission modes: write/auto → bypassPermissions, analysis → default + if (mode === 'write' || mode === 'auto') { + args.push('--permission-mode', 'bypassPermissions'); + } else { + args.push('--permission-mode', 'default'); + } + // Output format for better parsing + args.push('--output-format', 'text'); + // Add directories + if (include) { + const dirs = include.split(',').map((d) => d.trim()).filter((d) => d); + for (const addDir of dirs) { + args.push('--add-dir', addDir); + } + } + break; + + default: + errorLog('BUILD_CMD', `Unknown CLI tool: ${tool}`); + throw new Error(`Unknown CLI tool: ${tool}`); + } + + debugLog('BUILD_CMD', `Command built successfully`, { + command, + args, + useStdin, + fullCommand: `${command} ${args.join(' ')}${useStdin ? ' (stdin)' : ''}`, + }); + + return { command, args, useStdin }; +} diff --git a/ccw/src/tools/cli-executor.ts b/ccw/src/tools/cli-executor.ts index 4d94dff9..203496db 100644 --- a/ccw/src/tools/cli-executor.ts +++ b/ccw/src/tools/cli-executor.ts @@ -1,2680 +1,7 @@ /** * CLI Executor Tool - Unified execution for external CLI tools - * Supports Gemini, Qwen, and Codex with streaming output + * + * Thin re-export facade to keep the public API stable. 
*/ -import { z } from 'zod'; -import type { ToolSchema, ToolResult } from '../types/tool.js'; -import type { HistoryIndexEntry } from './cli-history-store.js'; -import { spawn, ChildProcess } from 'child_process'; -import { existsSync, mkdirSync, readFileSync, writeFileSync, unlinkSync, readdirSync, statSync } from 'fs'; -import { join, relative } from 'path'; -import { validatePath } from '../utils/path-resolver.js'; - -// Track current running child process for cleanup on interruption -let currentChildProcess: ChildProcess | null = null; - -/** - * Kill the current running CLI child process - * Called when parent process receives SIGINT/SIGTERM - */ -export function killCurrentCliProcess(): boolean { - if (currentChildProcess && !currentChildProcess.killed) { - debugLog('KILL', 'Killing current child process', { pid: currentChildProcess.pid }); - currentChildProcess.kill('SIGTERM'); - // Force kill after 2 seconds if still running - setTimeout(() => { - if (currentChildProcess && !currentChildProcess.killed) { - currentChildProcess.kill('SIGKILL'); - } - }, 2000); - return true; - } - return false; -} - -// Debug logging utility - check env at runtime for --debug flag support -function isDebugEnabled(): boolean { - return process.env.DEBUG === 'true' || process.env.DEBUG === '1' || process.env.CCW_DEBUG === 'true'; -} - -function debugLog(category: string, message: string, data?: Record): void { - if (!isDebugEnabled()) return; - const timestamp = new Date().toISOString(); - const prefix = `[${timestamp}] [CLI-DEBUG] [${category}]`; - if (data) { - console.error(`${prefix} ${message}`, JSON.stringify(data, null, 2)); - } else { - console.error(`${prefix} ${message}`); - } -} - -function errorLog(category: string, message: string, error?: Error | unknown, context?: Record): void { - const timestamp = new Date().toISOString(); - const prefix = `[${timestamp}] [CLI-ERROR] [${category}]`; - console.error(`${prefix} ${message}`); - if (error instanceof Error) { - 
console.error(`${prefix} Error: ${error.message}`); - if (isDebugEnabled() && error.stack) { - console.error(`${prefix} Stack: ${error.stack}`); - } - } else if (error) { - console.error(`${prefix} Error: ${String(error)}`); - } - if (context) { - console.error(`${prefix} Context:`, JSON.stringify(context, null, 2)); - } -} - -// ========== Unified Stream-JSON Parser ========== - -/** - * Claude CLI stream-json message types - */ -interface ClaudeStreamMessage { - type: 'system' | 'assistant' | 'result' | 'error'; - subtype?: 'init' | 'success' | 'error'; - session_id?: string; - model?: string; - message?: { - content: Array<{ type: 'text'; text: string }>; - }; - result?: string; - total_cost_usd?: number; - usage?: { - input_tokens?: number; - output_tokens?: number; - }; - error?: string; -} - -/** - * Gemini/Qwen CLI stream-json message types - */ -interface GeminiStreamMessage { - type: 'init' | 'message' | 'result'; - timestamp?: string; - session_id?: string; - model?: string; - role?: 'user' | 'assistant'; - content?: string; - delta?: boolean; - status?: 'success' | 'error'; - stats?: { - total_tokens?: number; - input_tokens?: number; - output_tokens?: number; - }; -} - -/** - * Codex CLI JSON message types - */ -interface CodexStreamMessage { - type: 'thread.started' | 'turn.started' | 'item.completed' | 'turn.completed'; - thread_id?: string; - item?: { - type: 'reasoning' | 'agent_message'; - text: string; - }; - usage?: { - input_tokens?: number; - output_tokens?: number; - }; -} - -/** - * Unified Stream-JSON Parser for Claude, Gemini, Qwen, and Codex - * Supports different JSON formats and extracts text, session info, and usage data - */ -class UnifiedStreamParser { - private tool: 'claude' | 'gemini' | 'qwen' | 'codex'; - private lineBuffer = ''; - private extractedText = ''; - private sessionInfo: { session_id?: string; model?: string; thread_id?: string } = {}; - private usageInfo: { cost?: number; tokens?: { input: number; output: number } } = 
{}; - - constructor(tool: 'claude' | 'gemini' | 'qwen' | 'codex') { - this.tool = tool; - } - - /** - * Process incoming data chunk - * @returns Extracted text to output with message type prefixes - */ - processChunk(data: string): string { - this.lineBuffer += data; - const lines = this.lineBuffer.split('\n'); - - // Keep last incomplete line in buffer - this.lineBuffer = lines.pop() || ''; - - let output = ''; - for (const line of lines) { - const trimmed = line.trim(); - if (!trimmed) continue; - - try { - output += this.parseJsonLine(trimmed); - } catch (err) { - // Not valid JSON or not a stream-json line - pass through as-is - debugLog('STREAM_PARSER', `Non-JSON line (passing through): ${trimmed.substring(0, 100)}`); - output += line + '\n'; - } - } - - return output; - } - - /** - * Parse a single JSON line based on tool type - */ - private parseJsonLine(line: string): string { - switch (this.tool) { - case 'claude': - return this.parseClaudeLine(line); - case 'gemini': - case 'qwen': - return this.parseGeminiQwenLine(line); - case 'codex': - return this.parseCodexLine(line); - default: - return ''; - } - } - - /** - * Parse Claude stream-json format - */ - private parseClaudeLine(line: string): string { - const msg: ClaudeStreamMessage = JSON.parse(line); - let output = ''; - - // Extract session metadata - if (msg.type === 'system' && msg.subtype === 'init') { - this.sessionInfo.session_id = msg.session_id; - this.sessionInfo.model = msg.model; - debugLog('STREAM_PARSER', 'Claude session initialized', this.sessionInfo); - output += `[系统] 会话初始化: ${msg.model || 'unknown'}\n`; - } - - // Extract assistant response text - if (msg.type === 'assistant' && msg.message?.content) { - for (const item of msg.message.content) { - if (item.type === 'text' && item.text && item.text.trim()) { // Filter empty/whitespace-only text - this.extractedText += item.text; - output += `[响应] ${item.text}\n`; // Add newline for proper line separation - } - // Extract content from 
write_file tool calls (for rules generation) - // Use type assertion to access tool_use properties - const anyItem = item as { type: string; name?: string; input?: { content?: string } }; - if (anyItem.type === 'tool_use' && anyItem.input?.content && typeof anyItem.input.content === 'string') { - const toolName = anyItem.name || ''; - // Check if this is a file write operation - if (toolName.includes('write_file') || toolName.includes('Write')) { - // Use the file content as extracted text (overwrite previous text response) - this.extractedText = anyItem.input.content; - output += `[工具] ${toolName}: 写入文件内容 (${anyItem.input.content.length} 字符)\n`; - } - } - } - } - - // Extract result metadata - if (msg.type === 'result') { - if (msg.total_cost_usd !== undefined) { - this.usageInfo.cost = msg.total_cost_usd; - } - if (msg.usage) { - this.usageInfo.tokens = { - input: msg.usage.input_tokens || 0, - output: msg.usage.output_tokens || 0 - }; - } - debugLog('STREAM_PARSER', 'Claude execution result received', { - subtype: msg.subtype, - cost: this.usageInfo.cost, - tokens: this.usageInfo.tokens - }); - output += `[结果] 状态: ${msg.subtype || 'completed'}\n`; - } - - // Handle errors - if (msg.type === 'error') { - errorLog('STREAM_PARSER', `Claude error in stream: ${msg.error || 'Unknown error'}`); - output += `[错误] ${msg.error || 'Unknown error'}\n`; - } - - return output; - } - - private lastMessageType: string = ''; // Track last message type for delta mode - - /** - * Parse Gemini/Qwen stream-json format - */ - private parseGeminiQwenLine(line: string): string { - const msg: GeminiStreamMessage = JSON.parse(line); - let output = ''; - - // Extract session metadata - if (msg.type === 'init') { - this.sessionInfo.session_id = msg.session_id; - this.sessionInfo.model = msg.model; - debugLog('STREAM_PARSER', `${this.tool} session initialized`, this.sessionInfo); - output += `[系统] 会话初始化: ${msg.model || 'unknown'}\n`; - this.lastMessageType = 'init'; - } - - // Extract 
assistant message - if (msg.type === 'message' && msg.role === 'assistant' && msg.content) { - const contentText = msg.content.trim(); // Filter empty/whitespace-only content - if (contentText) { - this.extractedText += msg.content; - if (msg.delta) { - // Delta mode: add prefix only for first chunk - if (this.lastMessageType !== 'assistant') { - output += `[响应] ${msg.content}`; - } else { - output += msg.content; - } - } else { - // Full message mode - output += `[响应] ${msg.content}\n`; - } - this.lastMessageType = 'assistant'; - } - } - - // Extract result statistics - if (msg.type === 'result') { - // Add newline before result if last was delta streaming - if (this.lastMessageType === 'assistant') { - output += '\n'; - } - if (msg.stats) { - this.usageInfo.tokens = { - input: msg.stats.input_tokens || 0, - output: msg.stats.output_tokens || 0 - }; - } - debugLog('STREAM_PARSER', `${this.tool} execution result received`, { - status: msg.status, - tokens: this.usageInfo.tokens - }); - output += `[结果] 状态: ${msg.status || 'success'}\n`; - this.lastMessageType = 'result'; - } - - return output; - } - - /** - * Parse Codex JSON format - */ - private parseCodexLine(line: string): string { - const msg: CodexStreamMessage = JSON.parse(line); - let output = ''; - - // Extract thread metadata - if (msg.type === 'thread.started' && msg.thread_id) { - this.sessionInfo.thread_id = msg.thread_id; - debugLog('STREAM_PARSER', 'Codex thread started', { thread_id: msg.thread_id }); - output += `[系统] 线程启动: ${msg.thread_id}\n`; - } - - // Extract reasoning text - if (msg.type === 'item.completed' && msg.item?.type === 'reasoning') { - output += `[思考] ${msg.item.text}\n`; - } - - // Extract agent message - if (msg.type === 'item.completed' && msg.item?.type === 'agent_message') { - this.extractedText += msg.item.text; - output += `[响应] ${msg.item.text}\n`; - } - - // Extract usage statistics - if (msg.type === 'turn.completed' && msg.usage) { - this.usageInfo.tokens = { - input: 
msg.usage.input_tokens || 0, - output: msg.usage.output_tokens || 0 - }; - debugLog('STREAM_PARSER', 'Codex turn completed', { - tokens: this.usageInfo.tokens - }); - output += `[结果] 回合完成\n`; - } - - return output; - } - - /** - * Flush remaining buffer on stream end - */ - flush(): string { - if (this.lineBuffer.trim()) { - return this.processChunk('\n'); // Force process remaining line - } - return ''; - } - - /** - * Get full extracted text - */ - getExtractedText(): string { - return this.extractedText; - } - - /** - * Get session metadata - */ - getSessionInfo() { - return this.sessionInfo; - } - - /** - * Get usage metadata - */ - getUsageInfo() { - return this.usageInfo; - } -} - -// LiteLLM integration -import { executeLiteLLMEndpoint } from './litellm-executor.js'; -import { findEndpointById } from '../config/litellm-api-config-manager.js'; - -// Native resume support -import { - trackNewSession, - getNativeResumeArgs, - supportsNativeResume, - calculateProjectHash -} from './native-session-discovery.js'; -import { - determineResumeStrategy, - buildContextPrefix, - getResumeModeDescription, - type ResumeDecision -} from './resume-strategy.js'; -import { - isToolEnabled as isToolEnabledFromConfig, - enableTool as enableToolFromConfig, - disableTool as disableToolFromConfig, - getPrimaryModel -} from './cli-config-manager.js'; -import { StoragePaths, ensureStorageDir } from '../config/storage-paths.js'; - -// Lazy-loaded SQLite store module -let sqliteStoreModule: typeof import('./cli-history-store.js') | null = null; - -/** - * Get or initialize SQLite store (async) - */ -async function getSqliteStore(baseDir: string) { - if (!sqliteStoreModule) { - sqliteStoreModule = await import('./cli-history-store.js'); - } - return sqliteStoreModule.getHistoryStore(baseDir); -} - -/** - * Get SQLite store (sync - uses cached module) - */ -function getSqliteStoreSync(baseDir: string) { - if (!sqliteStoreModule) { - throw new Error('SQLite store not initialized. 
Call an async function first.'); - } - return sqliteStoreModule.getHistoryStore(baseDir); -} - -// Define Zod schema for validation -const ParamsSchema = z.object({ - tool: z.enum(['gemini', 'qwen', 'codex', 'claude']), - prompt: z.string().min(1, 'Prompt is required'), - mode: z.enum(['analysis', 'write', 'auto']).default('analysis'), - format: z.enum(['plain', 'yaml', 'json']).default('plain'), // Multi-turn prompt concatenation format - model: z.string().optional(), - cd: z.string().optional(), - includeDirs: z.string().optional(), - timeout: z.number().default(0), // 0 = no internal timeout, controlled by external caller (e.g., bash timeout) - resume: z.union([z.boolean(), z.string()]).optional(), // true = last, string = single ID or comma-separated IDs - id: z.string().optional(), // Custom execution ID (e.g., IMPL-001-step1) - noNative: z.boolean().optional(), // Force prompt concatenation instead of native resume - category: z.enum(['user', 'internal', 'insight']).default('user'), // Execution category for tracking - parentExecutionId: z.string().optional(), // Parent execution ID for fork/retry scenarios - stream: z.boolean().default(false), // false = cache full output (default), true = stream output via callback -}); - -// Execution category types -export type ExecutionCategory = 'user' | 'internal' | 'insight'; - -type Params = z.infer; - -// Prompt concatenation format types -type PromptFormat = 'plain' | 'yaml' | 'json'; - -interface ToolAvailability { - available: boolean; - path: string | null; -} - -// Tool availability cache with TTL -interface CachedToolAvailability { - result: ToolAvailability; - timestamp: number; -} - -// Cache storage: Map -const toolAvailabilityCache = new Map(); -const CACHE_TTL_MS = 5 * 60 * 1000; // 5 minutes - -/** - * Check if cache entry is still valid - */ -function isCacheValid(cached: CachedToolAvailability): boolean { - return Date.now() - cached.timestamp < CACHE_TTL_MS; -} - -/** - * Clear expired cache entries - 
*/ -function clearExpiredCache(): void { - const now = Date.now(); - const entriesToDelete: string[] = []; - - toolAvailabilityCache.forEach((cached, tool) => { - if (now - cached.timestamp >= CACHE_TTL_MS) { - entriesToDelete.push(tool); - } - }); - - entriesToDelete.forEach(tool => toolAvailabilityCache.delete(tool)); -} - -/** - * Clear all cache entries (useful for testing or forced refresh) - */ -export function clearToolCache(): void { - toolAvailabilityCache.clear(); -} - -// Single turn in a conversation -interface ConversationTurn { - turn: number; - timestamp: string; - prompt: string; - duration_ms: number; - status: 'success' | 'error' | 'timeout'; - exit_code: number | null; - output: { - stdout: string; - stderr: string; - truncated: boolean; - }; -} - -// Multi-turn conversation record -interface ConversationRecord { - id: string; - created_at: string; - updated_at: string; - tool: string; - model: string; - mode: string; - category: ExecutionCategory; // user | internal | insight - total_duration_ms: number; - turn_count: number; - latest_status: 'success' | 'error' | 'timeout'; - turns: ConversationTurn[]; - parent_execution_id?: string; // For fork/retry scenarios -} - -// Legacy single execution record (for backward compatibility) -interface ExecutionRecord { - id: string; - timestamp: string; - tool: string; - model: string; - mode: string; - prompt: string; - status: 'success' | 'error' | 'timeout'; - exit_code: number | null; - duration_ms: number; - output: { - stdout: string; - stderr: string; - truncated: boolean; - }; -} - -interface HistoryIndex { - version: number; - total_executions: number; - executions: { - id: string; - timestamp: string; // created_at for conversations - updated_at?: string; // last update time - tool: string; - status: string; - duration_ms: number; - turn_count?: number; // number of turns in conversation - prompt_preview: string; - }[]; -} - -interface ExecutionOutput { - success: boolean; - execution: 
ExecutionRecord; - conversation: ConversationRecord; // Full conversation record - stdout: string; - stderr: string; - parsedOutput?: string; // Parsed output from stream parser (for stream-json tools) -} - -/** - * Check if a CLI tool is available (with caching) - */ -async function checkToolAvailability(tool: string): Promise { - debugLog('TOOL_CHECK', `Checking availability for tool: ${tool}`); - - // Check cache first - const cached = toolAvailabilityCache.get(tool); - if (cached && isCacheValid(cached)) { - debugLog('TOOL_CHECK', `Cache hit for ${tool}`, { available: cached.result.available, path: cached.result.path }); - return cached.result; - } - - // Clear expired entries periodically - clearExpiredCache(); - - // Perform actual check - return new Promise((resolve) => { - const isWindows = process.platform === 'win32'; - const command = isWindows ? 'where' : 'which'; - - debugLog('TOOL_CHECK', `Running ${command} ${tool}`, { platform: process.platform }); - - // Direct spawn - where/which are system commands that don't need shell wrapper - const child = spawn(command, [tool], { - shell: false, - stdio: ['ignore', 'pipe', 'pipe'] - }); - - let stdout = ''; - let stderr = ''; - child.stdout!.on('data', (data) => { stdout += data.toString(); }); - child.stderr?.on('data', (data) => { stderr += data.toString(); }); - - child.on('close', (code) => { - const result: ToolAvailability = code === 0 && stdout.trim() - ? 
{ available: true, path: stdout.trim().split('\n')[0] } - : { available: false, path: null }; - - if (result.available) { - debugLog('TOOL_CHECK', `Tool ${tool} found`, { path: result.path }); - // Only cache positive results to avoid caching transient failures - toolAvailabilityCache.set(tool, { - result, - timestamp: Date.now() - }); - } else { - debugLog('TOOL_CHECK', `Tool ${tool} not found`, { exitCode: code, stderr: stderr.trim() || '(empty)' }); - } - - resolve(result); - }); - - child.on('error', (error) => { - errorLog('TOOL_CHECK', `Failed to check tool availability: ${tool}`, error, { command, tool }); - // Don't cache errors - they may be transient - resolve({ available: false, path: null }); - }); - - // Timeout after 5 seconds - setTimeout(() => { - child.kill(); - debugLog('TOOL_CHECK', `Timeout checking tool ${tool} (5s)`); - // Don't cache timeouts - they may be transient - resolve({ available: false, path: null }); - }, 5000); - }); -} - -// Native resume configuration -interface NativeResumeConfig { - enabled: boolean; - sessionId?: string; // Native UUID - isLatest?: boolean; // Use latest/--last flag -} - -/** - * Build command arguments based on tool and options - */ -function buildCommand(params: { - tool: string; - prompt: string; - mode: string; - model?: string; - dir?: string; - include?: string; - nativeResume?: NativeResumeConfig; -}): { command: string; args: string[]; useStdin: boolean } { - const { tool, prompt, mode = 'analysis', model, dir, include, nativeResume } = params; - - debugLog('BUILD_CMD', `Building command for tool: ${tool}`, { - mode, - model: model || '(default)', - dir: dir || '(cwd)', - include: include || '(none)', - nativeResume: nativeResume ? 
{ enabled: nativeResume.enabled, isLatest: nativeResume.isLatest, sessionId: nativeResume.sessionId } : '(none)', - promptLength: prompt.length - }); - - let command = tool; - let args: string[] = []; - // Default to stdin for all tools to avoid escaping issues on Windows - let useStdin = true; - - switch (tool) { - case 'gemini': - // Native resume: gemini -r or -r latest - if (nativeResume?.enabled) { - if (nativeResume.isLatest) { - args.push('-r', 'latest'); - } else if (nativeResume.sessionId) { - args.push('-r', nativeResume.sessionId); - } - } - if (model) { - args.push('-m', model); - } - if (mode === 'write') { - args.push('--approval-mode', 'yolo'); - } - if (include) { - args.push('--include-directories', include); - } - // Enable stream-json output for unified parsing - args.push('--output-format', 'stream-json'); - break; - - case 'qwen': - // Native resume: qwen --continue (latest) or --resume - if (nativeResume?.enabled) { - if (nativeResume.isLatest) { - args.push('--continue'); - } else if (nativeResume.sessionId) { - args.push('--resume', nativeResume.sessionId); - } - } - if (model) { - args.push('-m', model); - } - if (mode === 'write') { - args.push('--approval-mode', 'yolo'); - } - if (include) { - args.push('--include-directories', include); - } - // Enable stream-json output for unified parsing - args.push('--output-format', 'stream-json'); - break; - - case 'codex': - // Codex supports stdin when using `-` as prompt argument - // Using stdin avoids Windows command line escaping issues with multi-line/special char prompts - useStdin = true; - // Native resume: codex resume [prompt] or --last - if (nativeResume?.enabled) { - args.push('resume'); - if (nativeResume.isLatest) { - args.push('--last'); - } else if (nativeResume.sessionId) { - args.push(nativeResume.sessionId); - } - // Codex resume still supports additional flags - // Note: -C is NOT used because spawn's cwd already sets the working directory - // Using both would cause path to 
be applied twice (e.g., codex-lens/codex-lens) - // Permission configuration based on mode: - // - analysis: --full-auto (read-only sandbox, no prompts) - safer for read operations - // - write/auto: --dangerously-bypass-approvals-and-sandbox (full access for modifications) - if (mode === 'write' || mode === 'auto') { - args.push('--dangerously-bypass-approvals-and-sandbox'); - } else { - args.push('--full-auto'); - } - if (model) { - args.push('-m', model); - } - if (include) { - const dirs = include.split(',').map(d => d.trim()).filter(d => d); - for (const addDir of dirs) { - args.push('--add-dir', addDir); - } - } - // Enable JSON output for unified parsing - args.push('--json'); - // Use `-` to indicate reading prompt from stdin - args.push('-'); - } else { - // Standard exec mode - args.push('exec'); - // Note: -C is NOT used because spawn's cwd already sets the working directory - // Using both would cause path to be applied twice (e.g., codex-lens/codex-lens) - // Permission configuration based on mode: - // - analysis: --full-auto (read-only sandbox, no prompts) - safer for read operations - // - write/auto: --dangerously-bypass-approvals-and-sandbox (full access for modifications) - if (mode === 'write' || mode === 'auto') { - args.push('--dangerously-bypass-approvals-and-sandbox'); - } else { - args.push('--full-auto'); - } - if (model) { - args.push('-m', model); - } - if (include) { - const dirs = include.split(',').map(d => d.trim()).filter(d => d); - for (const addDir of dirs) { - args.push('--add-dir', addDir); - } - } - // Enable JSON output for unified parsing - args.push('--json'); - // Use `-` to indicate reading prompt from stdin (avoids Windows escaping issues) - args.push('-'); - } - break; - - case 'claude': - // Claude Code: claude -p "prompt" for non-interactive mode - args.push('-p'); // Print mode (non-interactive) - // Native resume: claude --resume or --continue - if (nativeResume?.enabled) { - if (nativeResume.isLatest) { - 
args.push('--continue'); - } else if (nativeResume.sessionId) { - args.push('--resume', nativeResume.sessionId); - } - } - if (model) { - args.push('--model', model); - } - // Permission modes: write/auto → bypassPermissions, analysis → default - if (mode === 'write' || mode === 'auto') { - args.push('--permission-mode', 'bypassPermissions'); - } else { - args.push('--permission-mode', 'default'); - } - // Output format: stream-json for real-time parsing, text for backward compatibility - args.push('--output-format', 'stream-json'); - args.push('--verbose'); // Required for stream-json format - // Add directories - if (include) { - const dirs = include.split(',').map(d => d.trim()).filter(d => d); - for (const addDir of dirs) { - args.push('--add-dir', addDir); - } - } - break; - - default: - errorLog('BUILD_CMD', `Unknown CLI tool: ${tool}`); - throw new Error(`Unknown CLI tool: ${tool}`); - } - - debugLog('BUILD_CMD', `Command built successfully`, { - command, - args, - useStdin, - fullCommand: `${command} ${args.join(' ')}${useStdin ? 
' (stdin)' : ''}` - }); - - return { command, args, useStdin }; -} - -/** - * Ensure history directory exists (uses centralized storage) - */ -function ensureHistoryDir(baseDir: string): string { - const paths = StoragePaths.project(baseDir); - ensureStorageDir(paths.cliHistory); - return paths.cliHistory; -} - -/** - * Save conversation to SQLite - * @param baseDir - Project base directory (NOT historyDir) - */ -async function saveConversationAsync(baseDir: string, conversation: ConversationRecord): Promise { - const store = await getSqliteStore(baseDir); - store.saveConversation(conversation); -} - -/** - * Sync wrapper for saveConversation (uses cached SQLite module) - * @param baseDir - Project base directory (NOT historyDir) - */ -function saveConversation(baseDir: string, conversation: ConversationRecord): void { - try { - const store = getSqliteStoreSync(baseDir); - store.saveConversation(conversation); - } catch { - // If sync not available, queue for async save - saveConversationAsync(baseDir, conversation).catch(err => { - console.error('[CLI Executor] Failed to save conversation:', err.message); - }); - } -} - -/** - * Load existing conversation by ID from SQLite - * @param baseDir - Project base directory (NOT historyDir) - */ -async function loadConversationAsync(baseDir: string, conversationId: string): Promise { - const store = await getSqliteStore(baseDir); - return store.getConversation(conversationId); -} - -/** - * Sync wrapper for loadConversation (uses cached SQLite module) - * @param baseDir - Project base directory (NOT historyDir) - */ -function loadConversation(baseDir: string, conversationId: string): ConversationRecord | null { - try { - const store = getSqliteStoreSync(baseDir); - return store.getConversation(conversationId); - } catch { - // SQLite not initialized yet, return null - return null; - } -} - -/** - * Convert legacy ExecutionRecord to ConversationRecord - */ -function convertToConversation(record: ExecutionRecord): 
ConversationRecord {
  return {
    id: record.id,
    created_at: record.timestamp,
    updated_at: record.timestamp,
    tool: record.tool,
    model: record.model,
    mode: record.mode,
    category: 'user', // Legacy records default to user category
    total_duration_ms: record.duration_ms,
    turn_count: 1,
    latest_status: record.status,
    turns: [{
      turn: 1,
      timestamp: record.timestamp,
      prompt: record.prompt,
      duration_ms: record.duration_ms,
      status: record.status,
      exit_code: record.exit_code,
      output: record.output
    }]
  };
}

/**
 * Merge multiple conversations into a unified context.
 * Returns merged turns sorted by timestamp with source tracking.
 */
interface MergedTurn extends ConversationTurn {
  source_id: string; // Original conversation ID
}

interface MergeResult {
  mergedTurns: MergedTurn[];
  sourceConversations: ConversationRecord[];
  totalDuration: number;
}

function mergeConversations(conversations: ConversationRecord[]): MergeResult {
  const mergedTurns: MergedTurn[] = [];

  // Collect all turns with source tracking
  for (const conv of conversations) {
    for (const turn of conv.turns) {
      mergedTurns.push({
        ...turn,
        source_id: conv.id
      });
    }
  }

  // Sort by timestamp
  mergedTurns.sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime());

  // Re-number turns
  mergedTurns.forEach((turn, idx) => {
    turn.turn = idx + 1;
  });

  // Calculate total duration
  const totalDuration = mergedTurns.reduce((sum, t) => sum + t.duration_ms, 0);

  return {
    mergedTurns,
    sourceConversations: conversations,
    totalDuration
  };
}

/**
 * Build prompt from merged conversations
 */
function buildMergedPrompt(
  mergeResult: MergeResult,
  newPrompt: string,
  format: PromptFormat = 'plain'
): string {
  const concatenator = createPromptConcatenator({ format });

  // Set metadata for merged conversations
  concatenator.setMetadata(
    'merged_sources',
    mergeResult.sourceConversations.map(c => c.id).join(', ')
  );

  // Add all merged turns with source tracking
  for (const turn of mergeResult.mergedTurns) {
    concatenator.addFromConversationTurn(turn, turn.source_id);
  }

  return concatenator.build(newPrompt);
}

/**
 * Execute CLI tool with streaming output.
 *
 * NOTE(review): the generic type arguments on this signature were lost in
 * transit; restored as Record<string, unknown> / Promise<ExecutionOutput>
 * based on the surrounding usage — confirm against the type declarations.
 */
async function executeCliTool(
  params: Record<string, unknown>,
  onOutput?: ((data: { type: string; data: string }) => void) | null
): Promise<ExecutionOutput> {
  const parsed = ParamsSchema.safeParse(params);
  if (!parsed.success) {
    throw new Error(`Invalid params: ${parsed.error.message}`);
  }

  const { tool, prompt, mode, format, model, cd, includeDirs, timeout, resume, id: customId, noNative, category, parentExecutionId } = parsed.data;

  // Validate and determine working directory early (needed for conversation lookup)
  let workingDir: string;
  if (cd) {
    const validation = validatePath(cd, { mustExist: true });
    if (!validation.valid) {
      throw new Error(`Invalid working directory (--cd): ${validation.error}. Path: ${cd}`);
    }
    workingDir = validation.path!;
  } else {
    workingDir = process.cwd();
  }
  ensureHistoryDir(workingDir); // Ensure history directory exists

  // NEW: Check if model is a custom LiteLLM endpoint ID
  if (model) {
    const endpoint = findEndpointById(workingDir, model);
    if (endpoint) {
      // Route to LiteLLM executor
      if (onOutput) {
        onOutput({ type: 'stderr', data: `[Routing to LiteLLM endpoint: ${model}]\n` });
      }

      // FIX: capture the start time BEFORE the awaited call. Previously both
      // startTime and endTime were taken after the await, so duration_ms was
      // always ~0 regardless of how long the endpoint took.
      const startTime = Date.now();

      const result = await executeLiteLLMEndpoint({
        prompt,
        endpointId: model,
        baseDir: workingDir,
        cwd: cd,
        includeDirs: includeDirs ? includeDirs.split(',').map(d => d.trim()) : undefined,
        enableCache: true,
        onOutput: onOutput || undefined,
      });

      // Convert LiteLLM result to ExecutionOutput format
      const duration = Date.now() - startTime;

      const execution: ExecutionRecord = {
        id: customId || `${Date.now()}-litellm`,
        timestamp: new Date(startTime).toISOString(),
        tool: 'litellm',
        model: result.model,
        mode,
        prompt,
        status: result.success ? 'success' : 'error',
        exit_code: result.success ? 0 : 1,
        duration_ms: duration,
        output: {
          stdout: result.output,
          stderr: result.error || '',
          truncated: false,
        },
      };

      const conversation = convertToConversation(execution);

      // Try to save to history
      try {
        saveConversation(workingDir, conversation);
      } catch (err) {
        console.error('[CLI Executor] Failed to save LiteLLM history:', (err as Error).message);
      }

      return {
        success: result.success,
        execution,
        conversation,
        stdout: result.output,
        stderr: result.error || '',
      };
    }
  }

  // Get SQLite store for native session lookup
  const store = await getSqliteStore(workingDir);

  // Determine conversation ID and load existing conversation.
  // Logic:
  // - If --resume has multiple IDs: merge conversations
  //   - With --id: create new merged conversation
  //   - Without --id: append to ALL source conversations
  // - If --resume AND --id: fork - read context from resume ID, create new conversation with the new ID
  // - If --id provided (no resume): use that ID (create new or append)
  // - If --resume without --id: use resume ID (append to existing)
  // - No params: create new with auto-generated ID
  let conversationId: string;
  let existingConversation: ConversationRecord | null = null;
  let contextConversation: ConversationRecord | null = null; // For fork scenario
  let mergeResult: MergeResult | null = null; // For merge scenario
  let sourceConversations: ConversationRecord[] = []; // All source conversations for merge

  // Parse resume IDs (can be comma-separated for merge)
  const resumeIds: string[] = resume
    ? (typeof resume === 'string' ? resume.split(',').map(id => id.trim()).filter(Boolean) : [])
    : [];
  const isMerge = resumeIds.length > 1;
  const resumeId = resumeIds.length === 1 ? resumeIds[0] : null;

  if (isMerge) {
    // Merge scenario: multiple resume IDs
    sourceConversations = resumeIds
      .map(id => loadConversation(workingDir, id))
      .filter((c): c is ConversationRecord => c !== null);

    if (sourceConversations.length === 0) {
      throw new Error('No valid conversations found for merge');
    }

    mergeResult = mergeConversations(sourceConversations);

    if (customId) {
      // Create new merged conversation with custom ID
      conversationId = customId;
      existingConversation = loadConversation(workingDir, customId);
    } else {
      // Will append to ALL source conversations (handled in save logic)
      // Use first source conversation ID as primary
      conversationId = sourceConversations[0].id;
      existingConversation = sourceConversations[0];
    }
  } else if (customId && resumeId) {
    // Fork: read context from resume ID, but create new conversation with custom ID
    conversationId = customId;
    contextConversation = loadConversation(workingDir, resumeId);
    existingConversation = loadConversation(workingDir, customId);
  } else if (customId) {
    // Use custom ID - may be new or existing
    conversationId = customId;
    existingConversation = loadConversation(workingDir, customId);
  } else if (resumeId) {
    // Resume single ID without new ID - append to existing conversation
    conversationId = resumeId;
    existingConversation = loadConversation(workingDir, resumeId);
  } else if (resume) {
    // resume=true: get last conversation for this tool
    const history = getExecutionHistory(workingDir, { limit: 1, tool });
    if (history.executions.length > 0) {
      conversationId = history.executions[0].id;
      existingConversation = loadConversation(workingDir, conversationId);
    } else {
      // No previous conversation, create new
      conversationId = `${Date.now()}-${tool}`;
    }
  } else {
    // New conversation with auto-generated ID
    conversationId = `${Date.now()}-${tool}`;
  }

  // Determine resume strategy (native vs prompt-concat vs hybrid)
  let resumeDecision: ResumeDecision | null = null;
  let nativeResumeConfig: NativeResumeConfig | undefined;

  // resume=true (latest) - use native latest if supported
  if (resume === true && !noNative && supportsNativeResume(tool)) {
    resumeDecision = {
      strategy: 'native',
      isLatest: true,
      primaryConversationId: conversationId
    };
  }
  // Use strategy engine for complex scenarios
  else if (resumeIds.length > 0 && !noNative) {
    resumeDecision = determineResumeStrategy({
      tool,
      resumeIds,
      customId,
      forcePromptConcat: noNative,
      getNativeSessionId: (ccwId) => store.getNativeSessionId(ccwId),
      getConversation: (ccwId) => loadConversation(workingDir, ccwId),
      getConversationTool: (ccwId) => {
        const conv = loadConversation(workingDir, ccwId);
        return conv?.tool || null;
      }
    });
  }

  // Configure native resume if strategy decided to use it
  if (resumeDecision && (resumeDecision.strategy === 'native' || resumeDecision.strategy === 'hybrid')) {
    nativeResumeConfig = {
      enabled: true,
      sessionId: resumeDecision.nativeSessionId,
      isLatest: resumeDecision.isLatest
    };
  }

  // Build final prompt with conversation context:
  // - native: minimal prompt (native tool handles context)
  // - hybrid: context prefix from other conversations + new prompt
  // - prompt-concat: full multi-turn prompt
  let finalPrompt = prompt;

  if (resumeDecision?.strategy === 'native') {
    // Native mode: just use the new prompt, tool handles context
    finalPrompt = prompt;
  } else if (resumeDecision?.strategy === 'hybrid' && resumeDecision.contextTurns?.length) {
    // Hybrid mode: add context prefix from other conversations
    const contextPrefix = buildContextPrefix(resumeDecision.contextTurns, format);
    finalPrompt = contextPrefix + prompt;
  } else if (mergeResult && mergeResult.mergedTurns.length > 0) {
    // Full merge: use merged prompt
    finalPrompt = buildMergedPrompt(mergeResult, prompt, format);
  } else {
    // Standard prompt-concat
    const conversationForContext = contextConversation || existingConversation;
    if (conversationForContext && conversationForContext.turns.length > 0) {
      finalPrompt = buildMultiTurnPrompt(conversationForContext, prompt, format);
    }
  }

  // Check tool availability
  const toolStatus = await checkToolAvailability(tool);
  if (!toolStatus.available) {
    throw new Error(`CLI tool not available: ${tool}. Please ensure it is installed and in PATH.`);
  }

  // Log resume mode for debugging
  if (resumeDecision) {
    const modeDesc = getResumeModeDescription(resumeDecision);
    if (onOutput) {
      onOutput({ type: 'stderr', data: `[Resume mode: ${modeDesc}]\n` });
    }
  }

  // Use configured primary model if no explicit model provided
  const effectiveModel = model || getPrimaryModel(workingDir, tool);

  // Build command
  const { command, args, useStdin } = buildCommand({
    tool,
    prompt: finalPrompt,
    mode,
    model: effectiveModel,
    dir: cd,
    include: includeDirs,
    nativeResume: nativeResumeConfig
  });

  const startTime = Date.now();

  debugLog('EXEC', `Starting CLI execution`, {
    tool,
    mode,
    workingDir,
    conversationId,
    promptLength: finalPrompt.length,
    hasResume: !!resume,
    hasCustomId: !!customId
  });

  return new Promise((resolve, reject) => {
    // Windows requires shell: true for npm global commands (.cmd files)
    // Unix-like systems can use shell: false for direct execution
    const isWindows = process.platform === 'win32';

    debugLog('SPAWN', `Spawning process`, {
      command,
      args,
      cwd: workingDir,
      shell: isWindows,
      useStdin,
      platform: process.platform,
      fullCommand: `${command} ${args.join(' ')}`
    });

    const child = spawn(command, args, {
      cwd: workingDir,
      shell: isWindows, // Enable shell on Windows for .cmd files
      stdio: [useStdin ? 'pipe' : 'ignore', 'pipe', 'pipe']
    });

    // Track current child process for cleanup on interruption
    currentChildProcess = child;

    debugLog('SPAWN', `Process spawned`, { pid: child.pid });

    // Write prompt to stdin if using stdin mode (for gemini/qwen)
    if (useStdin && child.stdin) {
      debugLog('STDIN', `Writing prompt to stdin (${finalPrompt.length} bytes)`);
      child.stdin.write(finalPrompt);
      child.stdin.end();
    }

    let stdout = '';
    let stderr = '';
    let timedOut = false;

    // Initialize unified stream parser for all tools
    const streamParser = ['claude', 'gemini', 'qwen', 'codex'].includes(tool)
      ? new UnifiedStreamParser(tool as 'claude' | 'gemini' | 'qwen' | 'codex')
      : null;

    // Handle stdout
    child.stdout!.on('data', (data) => {
      const text = data.toString();
      stdout += text;

      // Parse stream-json for all supported tools
      // Always process chunks to populate extractedText, even without onOutput callback
      if (streamParser) {
        const parsedText = streamParser.processChunk(text);
        if (parsedText && onOutput) {
          onOutput({ type: 'stdout', data: parsedText });
        }
      } else if (onOutput) {
        onOutput({ type: 'stdout', data: text });
      }
    });

    // Handle stderr
    child.stderr!.on('data', (data) => {
      const text = data.toString();
      stderr += text;
      if (onOutput) {
        onOutput({ type: 'stderr', data: text });
      }
    });

    // Handle completion. This async listener builds the turn/conversation
    // records, persists them, and resolves the outer promise.
    child.on('close', async (code) => {
      // Clear current child process reference
      currentChildProcess = null;

      // Flush unified parser buffer if present
      // Always flush to capture remaining content, even without onOutput callback
      if (streamParser) {
        const remaining = streamParser.flush();
        if (remaining && onOutput) {
          onOutput({ type: 'stdout', data: remaining });
        }

        // Log usage information if available
        const usageInfo = streamParser.getUsageInfo();
        if (usageInfo.cost !== undefined || usageInfo.tokens) {
          debugLog('STREAM_USAGE', `${tool} execution usage`, {
            cost_usd: usageInfo.cost,
            tokens: usageInfo.tokens
          });
        }
      }

      const endTime = Date.now();
      const duration = endTime - startTime;

      debugLog('CLOSE', `Process closed`, {
        exitCode: code,
        duration: `${duration}ms`,
        timedOut,
        stdoutLength: stdout.length,
        stderrLength: stderr.length
      });

      // Determine status - prioritize output content over exit code
      let status: 'success' | 'error' | 'timeout' = 'success';
      if (timedOut) {
        status = 'timeout';
        debugLog('STATUS', `Execution timed out after ${duration}ms`);
      } else if (code !== 0) {
        // Non-zero exit code doesn't always mean failure
        // Check if there's valid output (AI response) - treat as success
        const hasValidOutput = stdout.trim().length > 0;
        // Heuristic fatal-error detection based on known stderr markers
        const hasFatalError = stderr.includes('FATAL') ||
          stderr.includes('Authentication failed') ||
          stderr.includes('API key') ||
          stderr.includes('rate limit exceeded');

        debugLog('STATUS', `Non-zero exit code analysis`, {
          exitCode: code,
          hasValidOutput,
          hasFatalError,
          stderrPreview: stderr.substring(0, 500)
        });

        if (hasValidOutput && !hasFatalError) {
          // Has output and no fatal errors - treat as success despite exit code
          status = 'success';
          debugLog('STATUS', `Treating as success (has valid output, no fatal errors)`);
        } else {
          status = 'error';
          errorLog('EXEC', `CLI execution failed`, undefined, {
            exitCode: code,
            tool,
            command,
            args,
            workingDir,
            stderrFull: stderr,
            stdoutPreview: stdout.substring(0, 200)
          });
        }
      } else {
        debugLog('STATUS', `Execution successful (exit code 0)`);
      }

      // Create new turn - cache full output when not streaming (default)
      const shouldCache = !parsed.data.stream;
      const newTurnOutput = {
        stdout: stdout.substring(0, 10240), // Truncate preview to 10KB
        stderr: stderr.substring(0, 2048), // Truncate preview to 2KB
        truncated: stdout.length > 10240 || stderr.length > 2048,
        cached: shouldCache,
        stdout_full: shouldCache ? stdout : undefined,
        stderr_full: shouldCache ? stderr : undefined
      };

      // Determine base turn number for merge scenarios
      const baseTurnNumber = isMerge && mergeResult
        ? mergeResult.mergedTurns.length + 1
        : (existingConversation ? existingConversation.turns.length + 1 : 1);

      const newTurn: ConversationTurn = {
        turn: baseTurnNumber,
        timestamp: new Date(startTime).toISOString(),
        prompt,
        duration_ms: duration,
        status,
        exit_code: code,
        output: newTurnOutput
      };

      // Create or update conversation record
      let conversation: ConversationRecord;

      if (isMerge && mergeResult && !customId) {
        // Merge without --id: append to ALL source conversations
        // Save new turn to each source conversation
        const savedConversations: ConversationRecord[] = [];
        for (const srcConv of sourceConversations) {
          const turnForSrc: ConversationTurn = {
            ...newTurn,
            turn: srcConv.turns.length + 1 // Use each conversation's turn count
          };
          const updatedConv: ConversationRecord = {
            ...srcConv,
            updated_at: new Date().toISOString(),
            total_duration_ms: srcConv.total_duration_ms + duration,
            turn_count: srcConv.turns.length + 1,
            latest_status: status,
            turns: [...srcConv.turns, turnForSrc]
          };
          savedConversations.push(updatedConv);
        }
        // Use first conversation as primary
        conversation = savedConversations[0];
        // Save all source conversations
        try {
          for (const conv of savedConversations) {
            saveConversation(workingDir, conv);
          }
        } catch (err) {
          console.error('[CLI Executor] Failed to save merged histories:', (err as Error).message);
        }
      } else if (isMerge && mergeResult && customId) {
        // Merge with --id: create new conversation with merged turns + new turn
        // Convert merged turns to regular turns (without source_id)
        const mergedTurns: ConversationTurn[] = mergeResult.mergedTurns.map((mt, idx) => ({
          turn: idx + 1,
          timestamp: mt.timestamp,
          prompt: mt.prompt,
          duration_ms: mt.duration_ms,
          status: mt.status,
          exit_code: mt.exit_code,
          output: mt.output
        }));

        // NOTE(review): when the custom-ID conversation already exists, only
        // newTurn is appended and mergedTurns are discarded — presumably
        // intentional (the merged context was already folded into the prompt);
        // confirm.
        conversation = existingConversation
          ? {
              ...existingConversation,
              updated_at: new Date().toISOString(),
              total_duration_ms: existingConversation.total_duration_ms + duration,
              turn_count: existingConversation.turns.length + 1,
              latest_status: status,
              turns: [...existingConversation.turns, newTurn]
            }
          : {
              id: conversationId,
              created_at: new Date(startTime).toISOString(),
              updated_at: new Date().toISOString(),
              tool,
              model: model || 'default',
              mode,
              category,
              total_duration_ms: mergeResult.totalDuration + duration,
              turn_count: mergedTurns.length + 1,
              latest_status: status,
              turns: [...mergedTurns, newTurn]
            };
        // Save merged conversation
        try {
          saveConversation(workingDir, conversation);
        } catch (err) {
          console.error('[CLI Executor] Failed to save merged conversation:', (err as Error).message);
        }
      } else {
        // Normal scenario: single conversation
        conversation = existingConversation
          ? {
              ...existingConversation,
              updated_at: new Date().toISOString(),
              total_duration_ms: existingConversation.total_duration_ms + duration,
              turn_count: existingConversation.turns.length + 1,
              latest_status: status,
              turns: [...existingConversation.turns, newTurn]
            }
          : {
              id: conversationId,
              created_at: new Date(startTime).toISOString(),
              updated_at: new Date().toISOString(),
              tool,
              model: model || 'default',
              mode,
              category,
              total_duration_ms: duration,
              turn_count: 1,
              latest_status: status,
              turns: [newTurn],
              parent_execution_id: parentExecutionId
            };
        // Try to save conversation to history
        try {
          saveConversation(workingDir, conversation);
        } catch (err) {
          // Non-fatal: continue even if history save fails
          console.error('[CLI Executor] Failed to save history:', (err as Error).message);
        }
      }

      // Track native session after execution (awaited to prevent process hang)
      // Pass prompt for precise matching in parallel execution scenarios
      try {
        const nativeSession = await trackNewSession(tool, new Date(startTime), workingDir, prompt);
        if (nativeSession) {
          // Save native session mapping
          try {
            store.saveNativeSessionMapping({
              ccw_id: conversationId,
              tool,
              native_session_id: nativeSession.sessionId,
              native_session_path: nativeSession.filePath,
              project_hash: nativeSession.projectHash,
              created_at: new Date().toISOString()
            });
          } catch (err) {
            console.error('[CLI Executor] Failed to save native session mapping:', (err as Error).message);
          }
        }
      } catch (err) {
        console.error('[CLI Executor] Failed to track native session:', (err as Error).message);
      }

      // Create legacy execution record for backward compatibility
      const execution: ExecutionRecord = {
        id: conversationId,
        timestamp: new Date(startTime).toISOString(),
        tool,
        model: model || 'default',
        mode,
        prompt,
        status,
        exit_code: code,
        duration_ms: duration,
        output: newTurnOutput
      };

      resolve({
        success: status === 'success',
        execution,
        conversation,
        stdout,
        stderr,
        parsedOutput: streamParser?.getExtractedText() || undefined
      });
    });

    // Handle errors
    child.on('error', (error) => {
      errorLog('SPAWN', `Failed to spawn process`, error, {
        tool,
        command,
        args,
        workingDir,
        fullCommand: `${command} ${args.join(' ')}`,
        platform: process.platform,
        path: process.env.PATH?.split(process.platform === 'win32' ? ';' : ':').slice(0, 10).join('\n ') + '...'
      });
      reject(new Error(`Failed to spawn ${tool}: ${error.message}\n Command: ${command} ${args.join(' ')}\n Working Dir: ${workingDir}`));
    });

    // Timeout handling (timeout=0 disables internal timeout, controlled by external caller)
    // SIGTERM first, then escalate to SIGKILL after a 5s grace period.
    let timeoutId: NodeJS.Timeout | null = null;
    if (timeout > 0) {
      timeoutId = setTimeout(() => {
        timedOut = true;
        child.kill('SIGTERM');
        setTimeout(() => {
          if (!child.killed) {
            child.kill('SIGKILL');
          }
        }, 5000);
      }, timeout);
    }

    // Second 'close' listener only clears the timeout; both listeners run.
    child.on('close', () => {
      if (timeoutId) {
        clearTimeout(timeoutId);
      }
    });
  });
}

// Tool schema for MCP
export const schema: ToolSchema = {
  name: 'cli_executor',
  description: `Execute external CLI tools (gemini/qwen/codex) with unified interface.
Modes:
- analysis: Read-only operations (default)
- write: File modifications allowed
- auto: Full autonomous operations (codex only)`,
  inputSchema: {
    type: 'object',
    properties: {
      tool: {
        type: 'string',
        enum: ['gemini', 'qwen', 'codex'],
        description: 'CLI tool to execute'
      },
      prompt: {
        type: 'string',
        description: 'Prompt to send to the CLI tool'
      },
      mode: {
        type: 'string',
        enum: ['analysis', 'write', 'auto'],
        description: 'Execution mode (default: analysis)',
        default: 'analysis'
      },
      model: {
        type: 'string',
        description: 'Model override (tool-specific)'
      },
      cd: {
        type: 'string',
        description: 'Working directory for execution (-C for codex)'
      },
      includeDirs: {
        type: 'string',
        description: 'Additional directories (comma-separated). Maps to --include-directories for gemini/qwen, --add-dir for codex'
      },
      timeout: {
        type: 'number',
        description: 'Timeout in milliseconds (default: 0 = disabled, controlled by external caller)',
        default: 0
      }
    },
    required: ['tool', 'prompt']
  }
};

// Handler function (MCP entry point): wraps executeCliTool and converts
// thrown errors into a { success: false, error } envelope.
// NOTE(review): generic type arguments were stripped in transit; restored as
// Record<string, unknown> and an inline envelope type — confirm against the
// module's declared response type.
export async function handler(params: Record<string, unknown>): Promise<{ success: boolean; result?: unknown; error?: string }> {
  try {
    const result = await executeCliTool(params);
    return {
      success: result.success,
      result
    };
  } catch (error) {
    return {
      success: false,
      error: `CLI execution failed: ${(error as Error).message}`
    };
  }
}

/**
 * Find all project directories with CLI history in centralized storage.
 * Returns list of project base directories (NOT history directories).
 */
function findProjectsWithHistory(): string[] {
  const projectDirs: string[] = [];
  const projectsRoot = join(StoragePaths.global.root(), 'projects');

  if (!existsSync(projectsRoot)) {
    return projectDirs;
  }

  try {
    const entries = readdirSync(projectsRoot, { withFileTypes: true });
    for (const entry of entries) {
      if (entry.isDirectory()) {
        const paths = StoragePaths.projectById(entry.name);
        if (existsSync(paths.historyDb)) {
          // Return project ID as identifier (actual project path is hashed)
          projectDirs.push(entry.name);
        }
      }
    }
  } catch {
    // Ignore permission errors
  }

  return projectDirs;
}

/**
 * Get execution history from SQLite (centralized storage).
 * When `recursive` is set, aggregates the parent project plus all child
 * projects discovered by scanChildProjectsAsync, newest first.
 */
export async function getExecutionHistoryAsync(baseDir: string, options: {
  limit?: number;
  tool?: string | null;
  status?: string | null;
  category?: ExecutionCategory | null;
  search?: string | null;
  recursive?: boolean;
} = {}): Promise<{
  total: number;
  count: number;
  executions: (HistoryIndex['executions'][0] & { sourceDir?: string })[];
}> {
  const { limit = 50, tool = null, status = null, category = null, search = null, recursive = false } = options;

  // Recursive mode: aggregate data from parent and all child projects
  if (recursive) {
    const { scanChildProjectsAsync } = await import('../config/storage-paths.js');
    const childProjects = await scanChildProjectsAsync(baseDir);

    let allExecutions: (HistoryIndex['executions'][0] & { sourceDir?: string })[] = [];
    let totalCount = 0;

    // Query parent project - apply limit at source to reduce memory footprint
    try {
      const parentStore = await getSqliteStore(baseDir);
      const parentResult = parentStore.getHistory({ limit, tool, status, category, search });
      totalCount += parentResult.total;

      for (const exec of parentResult.executions) {
        allExecutions.push({ ...exec, sourceDir: baseDir });
      }
    } catch (error) {
      if (process.env.DEBUG) {
        console.error(`[CLI History] Failed to query parent project ${baseDir}:`, error);
      }
    }

    // Query all child projects - apply limit to each child
    for (const child of childProjects) {
      try {
        const childStore = await getSqliteStore(child.projectPath);
        const childResult = childStore.getHistory({ limit, tool, status, category, search });
        totalCount += childResult.total;

        for (const exec of childResult.executions) {
          allExecutions.push({
            ...exec,
            sourceDir: child.relativePath // Show relative path for clarity
          });
        }
      } catch (error) {
        if (process.env.DEBUG) {
          console.error(`[CLI History] Failed to query child project ${child.projectPath}:`, error);
        }
      }
    }

    // Sort by timestamp (newest first) and apply limit
    allExecutions.sort((a, b) => Number(b.timestamp) - Number(a.timestamp));
    const limitedExecutions = allExecutions.slice(0, limit);

    return {
      total: totalCount,
      count: limitedExecutions.length,
      executions: limitedExecutions
    };
  }

  // Non-recursive mode: only query current project
  const store = await getSqliteStore(baseDir);
  return store.getHistory({ limit, tool, status, category, search });
}

/**
 * Get execution history (sync version - uses cached SQLite module).
 * Returns an empty result if the SQLite store is not yet initialized.
 */
export function getExecutionHistory(baseDir: string, options: {
  limit?: number;
  tool?: string | null;
  status?: string | null;
  recursive?: boolean;
} = {}): {
  total: number;
  count: number;
  executions: (HistoryIndex['executions'][0] & { sourceDir?: string })[];
} {
  const { limit = 50, tool = null, status = null, recursive = false } = options;

  try {
    if (recursive) {
      // NOTE(review): CommonJS require() in an otherwise ESM-style module —
      // presumably needed for a synchronous load; confirm the build setup
      // supports it.
      const { scanChildProjects } = require('../config/storage-paths.js');
      const childProjects = scanChildProjects(baseDir);

      let allExecutions: (HistoryIndex['executions'][0] & { sourceDir?: string })[] = [];
      let totalCount = 0;

      // Query parent project - apply limit at source
      try {
        const parentStore = getSqliteStoreSync(baseDir);
        const parentResult = parentStore.getHistory({ limit, tool, status });
        totalCount += parentResult.total;

        for (const exec of parentResult.executions) {
          allExecutions.push({ ...exec, sourceDir: baseDir });
        }
      } catch (error) {
        if (process.env.DEBUG) {
          console.error(`[CLI History Sync] Failed to query parent project ${baseDir}:`, error);
        }
      }

      // Query all child projects - apply limit to each child
      for (const child of childProjects) {
        try {
          const childStore = getSqliteStoreSync(child.projectPath);
          const childResult = childStore.getHistory({ limit, tool, status });
          totalCount += childResult.total;

          for (const exec of childResult.executions) {
            allExecutions.push({
              ...exec,
              sourceDir: child.relativePath
            });
          }
        } catch (error) {
          if (process.env.DEBUG) {
            console.error(`[CLI History Sync] Failed to query child project ${child.projectPath}:`, error);
          }
        }
      }

      // Sort by timestamp (newest first) and apply limit
      allExecutions.sort((a, b) => Number(b.timestamp) - Number(a.timestamp));

      return {
        total: totalCount,
        count: Math.min(allExecutions.length, limit),
        executions: allExecutions.slice(0, limit)
      };
    }

    const store = getSqliteStoreSync(baseDir);
    return store.getHistory({ limit, tool, status });
  } catch {
    // SQLite not initialized, return empty
    return { total: 0, count: 0, executions: [] };
  }
}

/**
 * Get conversation detail by ID (returns ConversationRecord)
 */
export function getConversationDetail(baseDir: string, conversationId: string): ConversationRecord | null {
  // Pass baseDir directly - loadConversation will resolve the correct storage path
  return loadConversation(baseDir, conversationId);
}

/**
 * Get conversation detail with native session info
 */
export function getConversationDetailWithNativeInfo(baseDir: string, conversationId: string) {
  try {
    const store = getSqliteStoreSync(baseDir);
    return store.getConversationWithNativeInfo(conversationId);
  } catch {
    // SQLite not initialized, return null
    return null;
  }
}

/**
 * Get execution detail by ID (legacy, returns ExecutionRecord for backward compatibility)
 */
export function getExecutionDetail(baseDir: string, executionId: string): ExecutionRecord | null {
  const conversation = getConversationDetail(baseDir, executionId);
  if (!conversation) return null;

  // Convert to legacy ExecutionRecord format (using latest turn)
  const latestTurn = conversation.turns[conversation.turns.length - 1];
  return {
    id: conversation.id,
    timestamp: conversation.created_at,
    tool: conversation.tool,
    model: conversation.model,
    mode: conversation.mode,
    prompt: latestTurn.prompt,
    status: conversation.latest_status,
    exit_code: latestTurn.exit_code,
    duration_ms: conversation.total_duration_ms,
    output: latestTurn.output
  };
}

/**
 * Delete execution by ID (async version)
 */
export async function deleteExecutionAsync(baseDir: string, executionId: string): Promise<{ success: boolean; error?: string }> {
  const store = await getSqliteStore(baseDir);
  return store.deleteConversation(executionId);
}

/**
 * Delete execution by ID (sync version - uses cached SQLite module)
 */
export function deleteExecution(baseDir: string, executionId: string): { success: boolean; error?: string } {
  try {
    const store = getSqliteStoreSync(baseDir);
    return store.deleteConversation(executionId);
  } catch {
    return { success: false, error: 'SQLite store not initialized' };
  }
}

/**
 * Batch delete executions (async)
 */
export async function batchDeleteExecutionsAsync(baseDir: string, ids: string[]): Promise<{
  success: boolean;
  deleted: number;
  total: number;
  errors?: string[];
}> {
  const store = await getSqliteStore(baseDir);
  const result = store.batchDelete(ids);
  return { ...result, total: ids.length };
}

/**
 * Get status of all CLI tools.
 * NOTE(review): generic type arguments were stripped in transit; restored from
 * checkToolAvailability's result shape — confirm.
 */
export async function getCliToolsStatus(): Promise<Record<string, { available: boolean; path: string | null }>> {
  const tools = ['gemini', 'qwen', 'codex', 'claude'];
  const results: Record<string, { available: boolean; path: string | null }> = {};

  // Probe all tools in parallel; each probe has its own internal timeout.
  await Promise.all(tools.map(async (tool) => {
    results[tool] = await checkToolAvailability(tool);
  }));

  return results;
}

// CLI tool package mapping (npm package names used for install hints)
const CLI_TOOL_PACKAGES: Record<string, string> = {
  gemini: '@google/gemini-cli',
  qwen: '@qwen-code/qwen-code',
  codex: '@openai/codex',
  claude: '@anthropic-ai/claude-code'
};

// Disabled tools storage (in-memory fallback, main storage is in cli-config.json)
const disabledTools = new Set<string>();

// Default
working directory for config operations -let configBaseDir = process.cwd(); - -/** - * Set the base directory for config operations - */ -export function setConfigBaseDir(dir: string): void { - configBaseDir = dir; -} - -/** - * Install a CLI tool via npm - */ -export async function installCliTool(tool: string): Promise<{ success: boolean; error?: string }> { - const packageName = CLI_TOOL_PACKAGES[tool]; - if (!packageName) { - return { success: false, error: `Unknown tool: ${tool}` }; - } - - return new Promise((resolve) => { - const child = spawn('npm', ['install', '-g', packageName], { - shell: true, - stdio: ['ignore', 'pipe', 'pipe'] - }); - - let stderr = ''; - child.stderr?.on('data', (data) => { stderr += data.toString(); }); - - child.on('close', (code) => { - // Clear cache to force re-check - toolAvailabilityCache.delete(tool); - - if (code === 0) { - resolve({ success: true }); - } else { - resolve({ success: false, error: stderr || `npm install failed with code ${code}` }); - } - }); - - child.on('error', (err) => { - resolve({ success: false, error: err.message }); - }); - - // Timeout after 2 minutes - setTimeout(() => { - child.kill(); - resolve({ success: false, error: 'Installation timed out' }); - }, 120000); - }); -} - -/** - * Uninstall a CLI tool via npm - */ -export async function uninstallCliTool(tool: string): Promise<{ success: boolean; error?: string }> { - const packageName = CLI_TOOL_PACKAGES[tool]; - if (!packageName) { - return { success: false, error: `Unknown tool: ${tool}` }; - } - - return new Promise((resolve) => { - const child = spawn('npm', ['uninstall', '-g', packageName], { - shell: true, - stdio: ['ignore', 'pipe', 'pipe'] - }); - - let stderr = ''; - child.stderr?.on('data', (data) => { stderr += data.toString(); }); - - child.on('close', (code) => { - // Clear cache to force re-check - toolAvailabilityCache.delete(tool); - - if (code === 0) { - resolve({ success: true }); - } else { - resolve({ success: false, error: 
stderr || `npm uninstall failed with code ${code}` }); - } - }); - - child.on('error', (err) => { - resolve({ success: false, error: err.message }); - }); - - // Timeout after 1 minute - setTimeout(() => { - child.kill(); - resolve({ success: false, error: 'Uninstallation timed out' }); - }, 60000); - }); -} - -/** - * Enable a CLI tool (updates config file) - */ -export function enableCliTool(tool: string): { success: boolean } { - try { - enableToolFromConfig(configBaseDir, tool); - disabledTools.delete(tool); // Also update in-memory fallback - return { success: true }; - } catch (err) { - console.error('[cli-executor] Error enabling tool:', err); - disabledTools.delete(tool); // Fallback to in-memory - return { success: true }; - } -} - -/** - * Disable a CLI tool (updates config file) - */ -export function disableCliTool(tool: string): { success: boolean } { - try { - disableToolFromConfig(configBaseDir, tool); - disabledTools.add(tool); // Also update in-memory fallback - return { success: true }; - } catch (err) { - console.error('[cli-executor] Error disabling tool:', err); - disabledTools.add(tool); // Fallback to in-memory - return { success: true }; - } -} - -/** - * Check if a tool is enabled (reads from config file) - */ -export function isToolEnabled(tool: string): boolean { - try { - return isToolEnabledFromConfig(configBaseDir, tool); - } catch { - // Fallback to in-memory check - return !disabledTools.has(tool); - } -} - -/** - * Get full status of all CLI tools including enabled state - */ -export async function getCliToolsFullStatus(): Promise> { - const tools = Object.keys(CLI_TOOL_PACKAGES); - const results: Record = {}; - - await Promise.all(tools.map(async (tool) => { - const availability = await checkToolAvailability(tool); - results[tool] = { - available: availability.available, - enabled: isToolEnabled(tool), - path: availability.path, - packageName: CLI_TOOL_PACKAGES[tool] - }; - })); - - return results; -} - -// ========== Prompt 
Concatenation System ========== - -/** - * Turn data structure for concatenation - */ -interface TurnData { - turn: number; - timestamp?: string; - role: 'user' | 'assistant'; - content: string; - status?: string; - duration_ms?: number; - source_id?: string; // For merged conversations -} - -/** - * Prompt concatenation options - */ -interface ConcatOptions { - format: PromptFormat; - includeMetadata?: boolean; - includeTurnMarkers?: boolean; - maxOutputLength?: number; // Truncate output for context efficiency -} - -/** - * PromptConcatenator - Dedicated class for building multi-turn prompts - * Supports multiple output formats: plain text, YAML, JSON - */ -class PromptConcatenator { - private turns: TurnData[] = []; - private options: ConcatOptions; - private metadata: Record = {}; - - constructor(options: Partial = {}) { - this.options = { - format: options.format || 'plain', - includeMetadata: options.includeMetadata ?? true, - includeTurnMarkers: options.includeTurnMarkers ?? true, - maxOutputLength: options.maxOutputLength || 8192 - }; - } - - /** - * Set metadata for the conversation - */ - setMetadata(key: string, value: unknown): this { - this.metadata[key] = value; - return this; - } - - /** - * Add a user turn - */ - addUserTurn(content: string, options: Partial> = {}): this { - this.turns.push({ - turn: this.turns.length + 1, - role: 'user', - content, - ...options - }); - return this; - } - - /** - * Add an assistant turn - */ - addAssistantTurn(content: string, options: Partial> = {}): this { - // Truncate output if needed - const truncatedContent = content.length > this.options.maxOutputLength! - ? content.substring(0, this.options.maxOutputLength!) + '\n... 
[truncated]' - : content; - - this.turns.push({ - turn: this.turns.length + 1, - role: 'assistant', - content: truncatedContent, - ...options - }); - return this; - } - - /** - * Add a conversation turn from ConversationTurn - */ - addFromConversationTurn(turn: ConversationTurn, sourceId?: string): this { - this.addUserTurn(turn.prompt, { - turn: turn.turn * 2 - 1, - timestamp: turn.timestamp, - source_id: sourceId - }); - this.addAssistantTurn(turn.output.stdout || '[No output]', { - turn: turn.turn * 2, - timestamp: turn.timestamp, - status: turn.status, - duration_ms: turn.duration_ms, - source_id: sourceId - }); - return this; - } - - /** - * Load turns from an existing conversation - */ - loadConversation(conversation: ConversationRecord): this { - for (const turn of conversation.turns) { - this.addFromConversationTurn(turn); - } - return this; - } - - /** - * Build the final prompt in plain text format - */ - private buildPlainText(newPrompt: string): string { - const parts: string[] = []; - - // Metadata section - if (this.options.includeMetadata && Object.keys(this.metadata).length > 0) { - parts.push('=== CONTEXT ==='); - for (const [key, value] of Object.entries(this.metadata)) { - parts.push(`${key}: ${String(value)}`); - } - parts.push(''); - } - - // Conversation history - if (this.turns.length > 0) { - parts.push('=== CONVERSATION HISTORY ==='); - parts.push(''); - - let currentTurn = 0; - for (let i = 0; i < this.turns.length; i += 2) { - currentTurn++; - const userTurn = this.turns[i]; - const assistantTurn = this.turns[i + 1]; - - if (this.options.includeTurnMarkers) { - const sourceMarker = userTurn.source_id ? 
` [${userTurn.source_id}]` : ''; - parts.push(`--- Turn ${currentTurn}${sourceMarker} ---`); - } - - parts.push('USER:'); - parts.push(userTurn.content); - parts.push(''); - - if (assistantTurn) { - parts.push('ASSISTANT:'); - parts.push(assistantTurn.content); - parts.push(''); - } - } - } - - // New request - parts.push('=== NEW REQUEST ==='); - parts.push(''); - parts.push(newPrompt); - - return parts.join('\n'); - } - - /** - * Build the final prompt in YAML format - */ - private buildYaml(newPrompt: string): string { - const yamlLines: string[] = []; - - // Metadata - if (this.options.includeMetadata && Object.keys(this.metadata).length > 0) { - yamlLines.push('context:'); - for (const [key, value] of Object.entries(this.metadata)) { - yamlLines.push(` ${key}: ${this.yamlValue(value)}`); - } - yamlLines.push(''); - } - - // Conversation history - if (this.turns.length > 0) { - yamlLines.push('conversation:'); - - let currentTurn = 0; - for (let i = 0; i < this.turns.length; i += 2) { - currentTurn++; - const userTurn = this.turns[i]; - const assistantTurn = this.turns[i + 1]; - - yamlLines.push(` - turn: ${currentTurn}`); - if (userTurn.source_id) { - yamlLines.push(` source: ${userTurn.source_id}`); - } - if (userTurn.timestamp) { - yamlLines.push(` timestamp: ${userTurn.timestamp}`); - } - - // User message - yamlLines.push(' user: |'); - const userLines = userTurn.content.split('\n'); - for (const line of userLines) { - yamlLines.push(` ${line}`); - } - - // Assistant message - if (assistantTurn) { - if (assistantTurn.status) { - yamlLines.push(` status: ${assistantTurn.status}`); - } - if (assistantTurn.duration_ms) { - yamlLines.push(` duration_ms: ${assistantTurn.duration_ms}`); - } - yamlLines.push(' assistant: |'); - const assistantLines = assistantTurn.content.split('\n'); - for (const line of assistantLines) { - yamlLines.push(` ${line}`); - } - } - yamlLines.push(''); - } - } - - // New request - yamlLines.push('new_request: |'); - const 
requestLines = newPrompt.split('\n'); - for (const line of requestLines) { - yamlLines.push(` ${line}`); - } - - return yamlLines.join('\n'); - } - - /** - * Build the final prompt in JSON format - */ - private buildJson(newPrompt: string): string { - const data: Record = {}; - - // Metadata - if (this.options.includeMetadata && Object.keys(this.metadata).length > 0) { - data.context = this.metadata; - } - - // Conversation history - if (this.turns.length > 0) { - const conversation: Array<{ - turn: number; - source?: string; - timestamp?: string; - user: string; - assistant?: string; - status?: string; - duration_ms?: number; - }> = []; - - for (let i = 0; i < this.turns.length; i += 2) { - const userTurn = this.turns[i]; - const assistantTurn = this.turns[i + 1]; - - const turnData: typeof conversation[0] = { - turn: Math.ceil((i + 1) / 2), - user: userTurn.content - }; - - if (userTurn.source_id) turnData.source = userTurn.source_id; - if (userTurn.timestamp) turnData.timestamp = userTurn.timestamp; - if (assistantTurn) { - turnData.assistant = assistantTurn.content; - if (assistantTurn.status) turnData.status = assistantTurn.status; - if (assistantTurn.duration_ms) turnData.duration_ms = assistantTurn.duration_ms; - } - - conversation.push(turnData); - } - - data.conversation = conversation; - } - - data.new_request = newPrompt; - - return JSON.stringify(data, null, 2); - } - - /** - * Helper to format YAML values - */ - private yamlValue(value: unknown): string { - if (typeof value === 'string') { - // Quote strings that might be interpreted as other types - if (/[:\[\]{}#&*!|>'"@`]/.test(value) || value === '') { - return `"${value.replace(/"/g, '\\"')}"`; - } - return value; - } - if (typeof value === 'number' || typeof value === 'boolean') { - return String(value); - } - if (value === null || value === undefined) { - return 'null'; - } - return JSON.stringify(value); - } - - /** - * Build the final prompt string - */ - build(newPrompt: string): string { - 
switch (this.options.format) { - case 'yaml': - return this.buildYaml(newPrompt); - case 'json': - return this.buildJson(newPrompt); - case 'plain': - default: - return this.buildPlainText(newPrompt); - } - } - - /** - * Reset the concatenator for reuse - */ - reset(): this { - this.turns = []; - this.metadata = {}; - return this; - } -} - -/** - * Create a prompt concatenator with specified options - */ -function createPromptConcatenator(options?: Partial): PromptConcatenator { - return new PromptConcatenator(options); -} - -/** - * Quick helper to build a multi-turn prompt in any format - */ -function buildPrompt( - conversation: ConversationRecord, - newPrompt: string, - format: PromptFormat = 'plain' -): string { - return createPromptConcatenator({ format }) - .loadConversation(conversation) - .build(newPrompt); -} - -/** - * Build multi-turn prompt with full conversation history - * Uses the PromptConcatenator with plain text format by default - */ -function buildMultiTurnPrompt( - conversation: ConversationRecord, - newPrompt: string, - format: PromptFormat = 'plain' -): string { - return buildPrompt(conversation, newPrompt, format); -} - -/** - * Build continuation prompt with previous conversation context (legacy) - */ -function buildContinuationPrompt(previous: ExecutionRecord, additionalPrompt?: string): string { - const parts: string[] = []; - - // Add previous conversation context - parts.push('=== PREVIOUS CONVERSATION ==='); - parts.push(''); - parts.push('USER PROMPT:'); - parts.push(previous.prompt); - parts.push(''); - parts.push('ASSISTANT RESPONSE:'); - parts.push(previous.output.stdout || '[No output recorded]'); - parts.push(''); - parts.push('=== CONTINUATION ==='); - parts.push(''); - - if (additionalPrompt) { - parts.push(additionalPrompt); - } else { - parts.push('Continue from where we left off. 
What should we do next?'); - } - - return parts.join('\n'); -} - -/** - * Get previous execution for resume - * @param baseDir - Working directory - * @param tool - Tool to filter by - * @param resume - true for last, or execution ID string - */ -function getPreviousExecution(baseDir: string, tool: string, resume: boolean | string): ExecutionRecord | null { - if (typeof resume === 'string') { - // Resume specific execution by ID - return getExecutionDetail(baseDir, resume); - } else if (resume === true) { - // Resume last execution for this tool - const history = getExecutionHistory(baseDir, { limit: 1, tool }); - if (history.executions.length === 0) { - return null; - } - return getExecutionDetail(baseDir, history.executions[0].id); - } - return null; -} - -/** - * Get latest execution for a specific tool - */ -export function getLatestExecution(baseDir: string, tool?: string): ExecutionRecord | null { - const history = getExecutionHistory(baseDir, { limit: 1, tool: tool || null }); - if (history.executions.length === 0) { - return null; - } - return getExecutionDetail(baseDir, history.executions[0].id); -} - -// ========== Native Session Content Functions ========== - -/** - * Get native session content by CCW ID - * Parses the native session file and returns full conversation data - */ -export async function getNativeSessionContent(baseDir: string, ccwId: string) { - const store = await getSqliteStore(baseDir); - return store.getNativeSessionContent(ccwId); -} - -/** - * Get formatted native conversation text - */ -export async function getFormattedNativeConversation(baseDir: string, ccwId: string, options?: { - includeThoughts?: boolean; - includeToolCalls?: boolean; - includeTokens?: boolean; - maxContentLength?: number; -}) { - const store = await getSqliteStore(baseDir); - return store.getFormattedNativeConversation(ccwId, options); -} - -/** - * Get conversation pairs from native session - */ -export async function getNativeConversationPairs(baseDir: 
string, ccwId: string) { - const store = await getSqliteStore(baseDir); - return store.getNativeConversationPairs(ccwId); -} - -/** - * Get enriched conversation (CCW + native session merged) - */ -export async function getEnrichedConversation(baseDir: string, ccwId: string) { - const store = await getSqliteStore(baseDir); - return store.getEnrichedConversation(ccwId); -} - -/** - * Get history with native session info - * Supports recursive querying of child projects - */ -export async function getHistoryWithNativeInfo(baseDir: string, options?: { - limit?: number; - offset?: number; - tool?: string | null; - status?: string | null; - category?: ExecutionCategory | null; - search?: string | null; - recursive?: boolean; -}) { - const { limit = 50, recursive = false, ...queryOptions } = options || {}; - - // Non-recursive mode: query single project - if (!recursive) { - const store = await getSqliteStore(baseDir); - return store.getHistoryWithNativeInfo({ limit, ...queryOptions }); - } - - // Recursive mode: aggregate data from parent and all child projects - const { scanChildProjectsAsync } = await import('../config/storage-paths.js'); - const childProjects = await scanChildProjectsAsync(baseDir); - - // Use the same type as store.getHistoryWithNativeInfo returns - type ExecutionWithNativeAndSource = HistoryIndexEntry & { - hasNativeSession: boolean; - nativeSessionId?: string; - nativeSessionPath?: string; - }; - - const allExecutions: ExecutionWithNativeAndSource[] = []; - let totalCount = 0; - - // Query parent project - try { - const parentStore = await getSqliteStore(baseDir); - const parentResult = parentStore.getHistoryWithNativeInfo({ limit, ...queryOptions }); - totalCount += parentResult.total; - - for (const exec of parentResult.executions) { - allExecutions.push({ ...exec, sourceDir: baseDir }); - } - } catch (error) { - if (process.env.DEBUG) { - console.error(`[CLI History] Failed to query parent project ${baseDir}:`, error); - } - } - - // Query all 
child projects - for (const child of childProjects) { - try { - const childStore = await getSqliteStore(child.projectPath); - const childResult = childStore.getHistoryWithNativeInfo({ limit, ...queryOptions }); - totalCount += childResult.total; - - for (const exec of childResult.executions) { - allExecutions.push({ ...exec, sourceDir: child.projectPath }); - } - } catch (error) { - if (process.env.DEBUG) { - console.error(`[CLI History] Failed to query child project ${child.projectPath}:`, error); - } - } - } - - // Sort by updated_at descending and apply limit - allExecutions.sort((a, b) => { - const timeA = a.updated_at ? new Date(a.updated_at).getTime() : new Date(a.timestamp).getTime(); - const timeB = b.updated_at ? new Date(b.updated_at).getTime() : new Date(b.timestamp).getTime(); - return timeB - timeA; - }); - const limitedExecutions = allExecutions.slice(0, limit); - - return { - total: totalCount, - count: limitedExecutions.length, - executions: limitedExecutions - }; -} - -// Export types -export type { ConversationRecord, ConversationTurn, ExecutionRecord, PromptFormat, ConcatOptions }; - -// Export utility functions and tool definition for backward compatibility -export { executeCliTool, checkToolAvailability }; - -// Export prompt concatenation utilities -export { PromptConcatenator, createPromptConcatenator, buildPrompt, buildMultiTurnPrompt }; - -// Note: Async storage functions (getExecutionHistoryAsync, deleteExecutionAsync, -// batchDeleteExecutionsAsync) are exported at declaration site - SQLite storage only - -// Export tool definition (for legacy imports) - This allows direct calls to execute with onOutput -export const cliExecutorTool = { - schema, - execute: executeCliTool // Use executeCliTool directly which supports onOutput callback -}; +export * from './cli-executor-core.js'; diff --git a/ccw/src/tools/cli-prompt-builder.ts b/ccw/src/tools/cli-prompt-builder.ts new file mode 100644 index 00000000..8ba90a5d --- /dev/null +++ 
b/ccw/src/tools/cli-prompt-builder.ts @@ -0,0 +1,434 @@ +/** + * CLI Prompt Builder + * Prompt concatenation + multi-turn formatting helpers + */ + +import type { ConversationRecord, ConversationTurn } from './cli-executor-state.js'; + +// Prompt concatenation format types +export type PromptFormat = 'plain' | 'yaml' | 'json'; + +/** + * Merge multiple conversations into a unified context + * Returns merged turns sorted by timestamp with source tracking + */ +export interface MergedTurn extends ConversationTurn { + source_id: string; // Original conversation ID +} + +export interface MergeResult { + mergedTurns: MergedTurn[]; + sourceConversations: ConversationRecord[]; + totalDuration: number; +} + +export function mergeConversations(conversations: ConversationRecord[]): MergeResult { + const mergedTurns: MergedTurn[] = []; + + // Collect all turns with source tracking + for (const conv of conversations) { + for (const turn of conv.turns) { + mergedTurns.push({ + ...turn, + source_id: conv.id + }); + } + } + + // Sort by timestamp + mergedTurns.sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()); + + // Re-number turns + mergedTurns.forEach((turn, idx) => { + turn.turn = idx + 1; + }); + + // Calculate total duration + const totalDuration = mergedTurns.reduce((sum, t) => sum + t.duration_ms, 0); + + return { + mergedTurns, + sourceConversations: conversations, + totalDuration + }; +} + +/** + * Build prompt from merged conversations + */ +export function buildMergedPrompt( + mergeResult: MergeResult, + newPrompt: string, + format: PromptFormat = 'plain' +): string { + const concatenator = createPromptConcatenator({ format }); + + // Set metadata for merged conversations + concatenator.setMetadata( + 'merged_sources', + mergeResult.sourceConversations.map(c => c.id).join(', ') + ); + + // Add all merged turns with source tracking + for (const turn of mergeResult.mergedTurns) { + concatenator.addFromConversationTurn(turn, 
turn.source_id); + } + + return concatenator.build(newPrompt); +} + +/** + * Turn data structure for concatenation + */ +interface TurnData { + turn: number; + timestamp?: string; + role: 'user' | 'assistant'; + content: string; + status?: string; + duration_ms?: number; + source_id?: string; // For merged conversations +} + +/** + * Prompt concatenation options + */ +export interface ConcatOptions { + format: PromptFormat; + includeMetadata?: boolean; + includeTurnMarkers?: boolean; + maxOutputLength?: number; // Truncate output for context efficiency +} + +/** + * PromptConcatenator - Dedicated class for building multi-turn prompts + * Supports multiple output formats: plain text, YAML, JSON + */ +export class PromptConcatenator { + private turns: TurnData[] = []; + private options: ConcatOptions; + private metadata: Record = {}; + + constructor(options: Partial = {}) { + this.options = { + format: options.format || 'plain', + includeMetadata: options.includeMetadata ?? true, + includeTurnMarkers: options.includeTurnMarkers ?? true, + maxOutputLength: options.maxOutputLength || 8192 + }; + } + + /** + * Set metadata for the conversation + */ + setMetadata(key: string, value: unknown): this { + this.metadata[key] = value; + return this; + } + + /** + * Add a user turn + */ + addUserTurn(content: string, options: Partial> = {}): this { + this.turns.push({ + turn: this.turns.length + 1, + role: 'user', + content, + ...options + }); + return this; + } + + /** + * Add an assistant turn + */ + addAssistantTurn(content: string, options: Partial> = {}): this { + // Truncate output if needed + const truncatedContent = content.length > this.options.maxOutputLength! + ? content.substring(0, this.options.maxOutputLength!) + '\n... 
[truncated]' + : content; + + this.turns.push({ + turn: this.turns.length + 1, + role: 'assistant', + content: truncatedContent, + ...options + }); + return this; + } + + /** + * Add a conversation turn from ConversationTurn + */ + addFromConversationTurn(turn: ConversationTurn, sourceId?: string): this { + this.addUserTurn(turn.prompt, { + turn: turn.turn * 2 - 1, + timestamp: turn.timestamp, + source_id: sourceId + }); + this.addAssistantTurn(turn.output.stdout || '[No output]', { + turn: turn.turn * 2, + timestamp: turn.timestamp, + status: turn.status, + duration_ms: turn.duration_ms, + source_id: sourceId + }); + return this; + } + + /** + * Load turns from an existing conversation + */ + loadConversation(conversation: ConversationRecord): this { + for (const turn of conversation.turns) { + this.addFromConversationTurn(turn); + } + return this; + } + + /** + * Build the final prompt in plain text format + */ + private buildPlainText(newPrompt: string): string { + const parts: string[] = []; + + // Metadata section + if (this.options.includeMetadata && Object.keys(this.metadata).length > 0) { + parts.push('=== CONTEXT ==='); + for (const [key, value] of Object.entries(this.metadata)) { + parts.push(`${key}: ${String(value)}`); + } + parts.push(''); + } + + // Conversation history + if (this.turns.length > 0) { + parts.push('=== CONVERSATION HISTORY ==='); + parts.push(''); + + let currentTurn = 0; + for (let i = 0; i < this.turns.length; i += 2) { + currentTurn++; + const userTurn = this.turns[i]; + const assistantTurn = this.turns[i + 1]; + + if (this.options.includeTurnMarkers) { + const sourceMarker = userTurn.source_id ? 
` [${userTurn.source_id}]` : ''; + parts.push(`--- Turn ${currentTurn}${sourceMarker} ---`); + } + + parts.push('USER:'); + parts.push(userTurn.content); + parts.push(''); + + if (assistantTurn) { + parts.push('ASSISTANT:'); + parts.push(assistantTurn.content); + parts.push(''); + } + } + } + + // New request + parts.push('=== NEW REQUEST ==='); + parts.push(''); + parts.push(newPrompt); + + return parts.join('\n'); + } + + /** + * Build the final prompt in YAML format + */ + private buildYaml(newPrompt: string): string { + const yamlLines: string[] = []; + + // Metadata + if (this.options.includeMetadata && Object.keys(this.metadata).length > 0) { + yamlLines.push('context:'); + for (const [key, value] of Object.entries(this.metadata)) { + yamlLines.push(` ${key}: ${this.yamlValue(value)}`); + } + yamlLines.push(''); + } + + // Conversation history + if (this.turns.length > 0) { + yamlLines.push('conversation:'); + + let currentTurn = 0; + for (let i = 0; i < this.turns.length; i += 2) { + currentTurn++; + const userTurn = this.turns[i]; + const assistantTurn = this.turns[i + 1]; + + yamlLines.push(` - turn: ${currentTurn}`); + if (userTurn.source_id) { + yamlLines.push(` source: ${userTurn.source_id}`); + } + if (userTurn.timestamp) { + yamlLines.push(` timestamp: ${userTurn.timestamp}`); + } + + // User message + yamlLines.push(' user: |'); + const userLines = userTurn.content.split('\n'); + for (const line of userLines) { + yamlLines.push(` ${line}`); + } + + // Assistant message + if (assistantTurn) { + if (assistantTurn.status) { + yamlLines.push(` status: ${assistantTurn.status}`); + } + if (assistantTurn.duration_ms) { + yamlLines.push(` duration_ms: ${assistantTurn.duration_ms}`); + } + yamlLines.push(' assistant: |'); + const assistantLines = assistantTurn.content.split('\n'); + for (const line of assistantLines) { + yamlLines.push(` ${line}`); + } + } + yamlLines.push(''); + } + } + + // New request + yamlLines.push('new_request: |'); + const 
requestLines = newPrompt.split('\n'); + for (const line of requestLines) { + yamlLines.push(` ${line}`); + } + + return yamlLines.join('\n'); + } + + /** + * Build the final prompt in JSON format + */ + private buildJson(newPrompt: string): string { + const data: Record = {}; + + // Metadata + if (this.options.includeMetadata && Object.keys(this.metadata).length > 0) { + data.context = this.metadata; + } + + // Conversation history + if (this.turns.length > 0) { + const conversation: Array<{ + turn: number; + source?: string; + timestamp?: string; + user: string; + assistant?: string; + status?: string; + duration_ms?: number; + }> = []; + + for (let i = 0; i < this.turns.length; i += 2) { + const userTurn = this.turns[i]; + const assistantTurn = this.turns[i + 1]; + + const turnData: typeof conversation[0] = { + turn: Math.ceil((i + 1) / 2), + user: userTurn.content + }; + + if (userTurn.source_id) turnData.source = userTurn.source_id; + if (userTurn.timestamp) turnData.timestamp = userTurn.timestamp; + if (assistantTurn) { + turnData.assistant = assistantTurn.content; + if (assistantTurn.status) turnData.status = assistantTurn.status; + if (assistantTurn.duration_ms) turnData.duration_ms = assistantTurn.duration_ms; + } + + conversation.push(turnData); + } + + data.conversation = conversation; + } + + data.new_request = newPrompt; + + return JSON.stringify(data, null, 2); + } + + /** + * Helper to format YAML values + */ + private yamlValue(value: unknown): string { + if (typeof value === 'string') { + // Quote strings that might be interpreted as other types + if (/[:\[\]{}#&*!|>'"@`]/.test(value) || value === '') { + return `"${value.replace(/"/g, '\\"')}"`; + } + return value; + } + if (typeof value === 'number' || typeof value === 'boolean') { + return String(value); + } + if (value === null || value === undefined) { + return 'null'; + } + return JSON.stringify(value); + } + + /** + * Build the final prompt string + */ + build(newPrompt: string): string { + 
switch (this.options.format) { + case 'yaml': + return this.buildYaml(newPrompt); + case 'json': + return this.buildJson(newPrompt); + case 'plain': + default: + return this.buildPlainText(newPrompt); + } + } + + /** + * Reset the concatenator for reuse + */ + reset(): this { + this.turns = []; + this.metadata = {}; + return this; + } +} + +/** + * Create a prompt concatenator with specified options + */ +export function createPromptConcatenator(options?: Partial): PromptConcatenator { + return new PromptConcatenator(options); +} + +/** + * Quick helper to build a multi-turn prompt in any format + */ +export function buildPrompt( + conversation: ConversationRecord, + newPrompt: string, + format: PromptFormat = 'plain' +): string { + return createPromptConcatenator({ format }) + .loadConversation(conversation) + .build(newPrompt); +} + +/** + * Build multi-turn prompt with full conversation history + * Uses the PromptConcatenator with plain text format by default + */ +export function buildMultiTurnPrompt( + conversation: ConversationRecord, + newPrompt: string, + format: PromptFormat = 'plain' +): string { + return buildPrompt(conversation, newPrompt, format); +} diff --git a/ccw/src/tools/codex-lens.ts b/ccw/src/tools/codex-lens.ts index f453813b..0a0e84d5 100644 --- a/ccw/src/tools/codex-lens.ts +++ b/ccw/src/tools/codex-lens.ts @@ -17,6 +17,7 @@ import { join, dirname } from 'path'; import { homedir } from 'os'; import { fileURLToPath } from 'url'; import { getSystemPython } from '../utils/python-utils.js'; +import { EXEC_TIMEOUTS } from '../utils/exec-constants.js'; // Get directory of this module const __filename = fileURLToPath(import.meta.url); @@ -340,7 +341,7 @@ async function ensureLiteLLMEmbedderReady(): Promise { for (const localPath of possiblePaths) { if (existsSync(join(localPath, 'pyproject.toml'))) { console.log(`[CodexLens] Installing ccw-litellm from local path: ${localPath}`); - execSync(`"${pipPath}" install -e "${localPath}"`, { stdio: 
'inherit' }); + execSync(`"${pipPath}" install -e "${localPath}"`, { stdio: 'inherit', timeout: EXEC_TIMEOUTS.PACKAGE_INSTALL }); installed = true; break; } @@ -348,7 +349,7 @@ async function ensureLiteLLMEmbedderReady(): Promise { if (!installed) { console.log('[CodexLens] Installing ccw-litellm from PyPI...'); - execSync(`"${pipPath}" install ccw-litellm`, { stdio: 'inherit' }); + execSync(`"${pipPath}" install ccw-litellm`, { stdio: 'inherit', timeout: EXEC_TIMEOUTS.PACKAGE_INSTALL }); } return { success: true }; @@ -426,11 +427,11 @@ async function detectGpuSupport(): Promise<{ mode: GpuMode; available: GpuMode[] // Check for NVIDIA GPU (CUDA) try { if (process.platform === 'win32') { - execSync('nvidia-smi', { stdio: 'pipe' }); + execSync('nvidia-smi', { stdio: 'pipe', timeout: EXEC_TIMEOUTS.SYSTEM_INFO }); available.push('cuda'); detectedInfo = 'NVIDIA GPU detected (CUDA available)'; } else { - execSync('which nvidia-smi', { stdio: 'pipe' }); + execSync('which nvidia-smi', { stdio: 'pipe', timeout: EXEC_TIMEOUTS.SYSTEM_INFO }); available.push('cuda'); detectedInfo = 'NVIDIA GPU detected (CUDA available)'; } @@ -503,7 +504,7 @@ async function installSemantic(gpuMode: GpuMode = 'cpu'): Promise { try { console.log('[CodexLens] Creating virtual environment...'); const pythonCmd = getSystemPython(); - execSync(`${pythonCmd} -m venv "${CODEXLENS_VENV}"`, { stdio: 'inherit' }); + execSync(`${pythonCmd} -m venv "${CODEXLENS_VENV}"`, { stdio: 'inherit', timeout: EXEC_TIMEOUTS.PROCESS_SPAWN }); } catch (err) { return { success: false, error: `Failed to create venv: ${(err as Error).message}` }; } @@ -651,7 +652,7 @@ async function bootstrapVenv(): Promise { for (const localPath of possiblePaths) { if (existsSync(join(localPath, 'pyproject.toml'))) { console.log(`[CodexLens] Installing from local path: ${localPath}`); - execSync(`"${pipPath}" install -e "${localPath}"`, { stdio: 'inherit' }); + execSync(`"${pipPath}" install -e "${localPath}"`, { stdio: 'inherit', 
timeout: EXEC_TIMEOUTS.PACKAGE_INSTALL }); installed = true; break; } @@ -659,7 +660,7 @@ async function bootstrapVenv(): Promise { if (!installed) { console.log('[CodexLens] Installing from PyPI...'); - execSync(`"${pipPath}" install codexlens`, { stdio: 'inherit' }); + execSync(`"${pipPath}" install codexlens`, { stdio: 'inherit', timeout: EXEC_TIMEOUTS.PACKAGE_INSTALL }); } // Clear cache after successful installation @@ -1368,7 +1369,7 @@ async function uninstallCodexLens(): Promise { const { execSync } = await import('child_process'); try { // Kill any python processes from our venv that might be holding file locks - execSync(`taskkill /F /IM python.exe /FI "MODULES eq sqlite3" 2>nul`, { stdio: 'ignore' }); + execSync(`taskkill /F /IM python.exe /FI "MODULES eq sqlite3" 2>nul`, { stdio: 'ignore', timeout: EXEC_TIMEOUTS.SYSTEM_INFO }); } catch { // Ignore errors - no processes to kill } @@ -1397,7 +1398,7 @@ async function uninstallCodexLens(): Promise { try { const { execSync } = await import('child_process'); // Try to close handles on the specific file - execSync(`handle -c ${err.path} -y 2>nul`, { stdio: 'ignore' }); + execSync(`handle -c ${err.path} -y 2>nul`, { stdio: 'ignore', timeout: EXEC_TIMEOUTS.SYSTEM_INFO }); } catch { // handle.exe may not be installed, ignore } @@ -1454,7 +1455,7 @@ function cancelIndexing(): { success: boolean; message?: string; error?: string // On Windows, use taskkill to kill the process tree const { execSync } = require('child_process'); try { - execSync(`taskkill /pid ${currentIndexingProcess.pid} /T /F`, { stdio: 'ignore' }); + execSync(`taskkill /pid ${currentIndexingProcess.pid} /T /F`, { stdio: 'ignore', timeout: EXEC_TIMEOUTS.SYSTEM_INFO }); } catch { // Process may have already exited } diff --git a/ccw/src/tools/detect-changed-modules.ts b/ccw/src/tools/detect-changed-modules.ts index 97a454fe..bd4c9c51 100644 --- a/ccw/src/tools/detect-changed-modules.ts +++ b/ccw/src/tools/detect-changed-modules.ts @@ -8,6 +8,15 @@ 
import type { ToolSchema, ToolResult } from '../types/tool.js'; import { readdirSync, statSync, existsSync } from 'fs'; import { join, resolve, dirname, extname, relative } from 'path'; import { execSync } from 'child_process'; +import { EXEC_TIMEOUTS } from '../utils/exec-constants.js'; + +function isExecTimeoutError(error: unknown): boolean { + const err = error as { code?: unknown; errno?: unknown; message?: unknown } | null; + const code = err?.code ?? err?.errno; + if (code === 'ETIMEDOUT') return true; + const message = typeof err?.message === 'string' ? err.message : ''; + return message.includes('ETIMEDOUT'); +} // Source file extensions to track const SOURCE_EXTENSIONS = [ @@ -53,9 +62,12 @@ interface ToolOutput { */ function isGitRepo(basePath: string): boolean { try { - execSync('git rev-parse --git-dir', { cwd: basePath, stdio: 'pipe' }); + execSync('git rev-parse --git-dir', { cwd: basePath, stdio: 'pipe', timeout: EXEC_TIMEOUTS.GIT_QUICK }); return true; - } catch (e) { + } catch (e: unknown) { + if (isExecTimeoutError(e)) { + console.warn(`[detect_changed_modules] git rev-parse timed out after ${EXEC_TIMEOUTS.GIT_QUICK}ms`); + } return false; } } @@ -69,13 +81,15 @@ function getGitChangedFiles(basePath: string): string[] { let output = execSync('git diff --name-only HEAD 2>/dev/null', { cwd: basePath, encoding: 'utf8', - stdio: ['pipe', 'pipe', 'pipe'] + stdio: ['pipe', 'pipe', 'pipe'], + timeout: EXEC_TIMEOUTS.GIT_DIFF, }).trim(); const cachedOutput = execSync('git diff --name-only --cached 2>/dev/null', { cwd: basePath, encoding: 'utf8', - stdio: ['pipe', 'pipe', 'pipe'] + stdio: ['pipe', 'pipe', 'pipe'], + timeout: EXEC_TIMEOUTS.GIT_DIFF, }).trim(); if (cachedOutput) { @@ -87,12 +101,16 @@ function getGitChangedFiles(basePath: string): string[] { output = execSync('git diff --name-only HEAD~1 HEAD 2>/dev/null', { cwd: basePath, encoding: 'utf8', - stdio: ['pipe', 'pipe', 'pipe'] + stdio: ['pipe', 'pipe', 'pipe'], + timeout: EXEC_TIMEOUTS.GIT_DIFF, 
}).trim(); } return output ? output.split('\n').filter(f => f.trim()) : []; - } catch (e) { + } catch (e: unknown) { + if (isExecTimeoutError(e)) { + console.warn(`[detect_changed_modules] git diff timed out after ${EXEC_TIMEOUTS.GIT_DIFF}ms`); + } return []; } } diff --git a/ccw/src/tools/smart-search.ts b/ccw/src/tools/smart-search.ts index 64f72c52..52fbf542 100644 --- a/ccw/src/tools/smart-search.ts +++ b/ccw/src/tools/smart-search.ts @@ -26,6 +26,7 @@ import { } from './codex-lens.js'; import type { ProgressInfo } from './codex-lens.js'; import { getProjectRoot } from '../utils/path-validator.js'; +import { EXEC_TIMEOUTS } from '../utils/exec-constants.js'; // Timing utilities for performance analysis const TIMING_ENABLED = process.env.SMART_SEARCH_TIMING === '1' || process.env.DEBUG?.includes('timing'); @@ -603,7 +604,7 @@ function checkToolAvailability(toolName: string): boolean { try { const isWindows = process.platform === 'win32'; const command = isWindows ? 'where' : 'which'; - execSync(`${command} ${toolName}`, { stdio: 'ignore' }); + execSync(`${command} ${toolName}`, { stdio: 'ignore', timeout: EXEC_TIMEOUTS.SYSTEM_INFO }); return true; } catch { return false; diff --git a/ccw/src/utils/exec-constants.ts b/ccw/src/utils/exec-constants.ts new file mode 100644 index 00000000..2ca55614 --- /dev/null +++ b/ccw/src/utils/exec-constants.ts @@ -0,0 +1,24 @@ +/** + * Centralized timeout defaults for synchronous process execution. + * + * `execSync` blocks the Node.js event loop. Always provide a timeout so callers + * fail fast instead of hanging indefinitely when external tools stall. + * + * Guidance: + * - Use `GIT_QUICK` for lightweight git queries (rev-parse, status). + * - Use `GIT_DIFF` for diff-based queries. + * - Use `GIT_LOG` for log/history queries. + * - Use `PYTHON_VERSION` for `python --version` style probes. + * - Use `SYSTEM_INFO` for OS/hardware capability probes (wmic, nvidia-smi, which/where). 
+ * - Use `PROCESS_SPAWN` for short-lived spawn-style operations. + * - Use `PACKAGE_INSTALL` for package manager operations that may take minutes. + */ +export const EXEC_TIMEOUTS = { + GIT_QUICK: 5_000, + GIT_DIFF: 10_000, + GIT_LOG: 15_000, + PYTHON_VERSION: 5_000, + SYSTEM_INFO: 10_000, + PROCESS_SPAWN: 30_000, + PACKAGE_INSTALL: 300_000, +} as const; diff --git a/ccw/src/utils/path-resolver.ts b/ccw/src/utils/path-resolver.ts index 1f477b8d..db1a4eaa 100644 --- a/ccw/src/utils/path-resolver.ts +++ b/ccw/src/utils/path-resolver.ts @@ -69,6 +69,21 @@ export function validatePath(inputPath: string, options: ValidatePathOptions = { return { valid: false, path: null, error: `Invalid path: ${message}` }; } + // Check if within base directory when specified (pre-symlink resolution) + const resolvedBase = baseDir ? resolvePath(baseDir) : null; + if (resolvedBase) { + const relativePath = relative(resolvedBase, resolvedPath); + + // Path traversal detection: relative path should not start with '..' + if (relativePath.startsWith('..') || isAbsolute(relativePath)) { + return { + valid: false, + path: null, + error: `Path must be within ${resolvedBase}` + }; + } + } + // Check if path exists when required if (mustExist && !existsSync(resolvedPath)) { return { valid: false, path: null, error: `Path does not exist: ${resolvedPath}` }; @@ -83,11 +98,30 @@ export function validatePath(inputPath: string, options: ValidatePathOptions = { const message = err instanceof Error ? err.message : String(err); return { valid: false, path: null, error: `Cannot resolve path: ${message}` }; } + } else if (resolvedBase) { + // For non-existent paths, resolve the nearest existing ancestor to prevent symlink-based escapes + // (e.g., baseDir/link/newfile where baseDir/link is a symlink to a disallowed location). 
+ let existingPath = resolvedPath; + while (!existsSync(existingPath)) { + const parent = resolve(existingPath, '..'); + if (parent === existingPath) break; + existingPath = parent; + } + + if (existsSync(existingPath)) { + try { + const realExisting = realpathSync(existingPath); + const remainder = relative(existingPath, resolvedPath); + realPath = remainder && remainder !== '.' ? join(realExisting, remainder) : realExisting; + } catch (err) { + const message = err instanceof Error ? err.message : String(err); + return { valid: false, path: null, error: `Cannot resolve path: ${message}` }; + } + } } - // Check if within base directory when specified - if (baseDir) { - const resolvedBase = resolvePath(baseDir); + // Check if within base directory when specified (post-symlink resolution) + if (resolvedBase) { const relativePath = relative(resolvedBase, realPath); // Path traversal detection: relative path should not start with '..' diff --git a/ccw/src/utils/path-validator.ts b/ccw/src/utils/path-validator.ts index 9cf6916f..14ac5e0f 100644 --- a/ccw/src/utils/path-validator.ts +++ b/ccw/src/utils/path-validator.ts @@ -7,7 +7,7 @@ * Inspired by MCP filesystem server's security model. */ -import { resolve, isAbsolute, normalize, relative } from 'path'; +import { resolve, isAbsolute, normalize, relative, sep } from 'path'; import { realpath, access } from 'fs/promises'; import { constants } from 'fs'; @@ -42,6 +42,27 @@ export function normalizePath(p: string): string { return normalize(p).replace(/\\/g, '/'); } +function canonicalizeForComparison(p: string): string { + const base = getProjectRoot(); + const absolute = isAbsolute(p) ? 
p : resolve(base, p); + let canonical = normalize(absolute); + + // Remove trailing separators (except drive roots like C:\ and posix root /) + canonical = canonical.replace(/[\\/]+$/, ''); + if (/^[a-zA-Z]:$/.test(canonical)) { + canonical += sep; + } else if (canonical === '') { + canonical = sep; + } + + // Windows paths are case-insensitive. + if (process.platform === 'win32') { + canonical = canonical.toLowerCase(); + } + + return canonical; +} + /** * Check if path is within allowed directories */ @@ -49,12 +70,13 @@ export function isPathWithinAllowedDirectories( targetPath: string, allowedDirectories: string[] ): boolean { - const normalizedTarget = normalizePath(targetPath); + const canonicalTarget = canonicalizeForComparison(targetPath); return allowedDirectories.some(dir => { - const normalizedDir = normalizePath(dir); - // Check if path equals or starts with allowed directory - return normalizedTarget === normalizedDir || - normalizedTarget.startsWith(normalizedDir + '/'); + const canonicalDir = canonicalizeForComparison(dir); + if (canonicalTarget === canonicalDir) return true; + + const boundary = canonicalDir.endsWith(sep) ? canonicalDir : canonicalDir + sep; + return canonicalTarget.startsWith(boundary); }); } diff --git a/ccw/src/utils/python-utils.ts b/ccw/src/utils/python-utils.ts index 73ec0732..b5ac4975 100644 --- a/ccw/src/utils/python-utils.ts +++ b/ccw/src/utils/python-utils.ts @@ -4,6 +4,15 @@ */ import { execSync } from 'child_process'; +import { EXEC_TIMEOUTS } from './exec-constants.js'; + +function isExecTimeoutError(error: unknown): boolean { + const err = error as { code?: unknown; errno?: unknown; message?: unknown } | null; + const code = err?.code ?? err?.errno; + if (code === 'ETIMEDOUT') return true; + const message = typeof err?.message === 'string' ? 
err.message : ''; + return message.includes('ETIMEDOUT'); +} /** * Parse Python version string to major.minor numbers @@ -40,7 +49,7 @@ export function getSystemPython(): string { const customPython = process.env.CCW_PYTHON; if (customPython) { try { - const version = execSync(`"${customPython}" --version 2>&1`, { encoding: 'utf8' }); + const version = execSync(`"${customPython}" --version 2>&1`, { encoding: 'utf8', timeout: EXEC_TIMEOUTS.PYTHON_VERSION }); if (version.includes('Python 3')) { const parsed = parsePythonVersion(version); if (parsed && !isPythonVersionCompatible(parsed.major, parsed.minor)) { @@ -48,8 +57,12 @@ export function getSystemPython(): string { } return `"${customPython}"`; } - } catch { - console.warn(`[Python] Warning: CCW_PYTHON="${customPython}" is not a valid Python executable, falling back to system Python`); + } catch (err: unknown) { + if (isExecTimeoutError(err)) { + console.warn(`[Python] Warning: CCW_PYTHON version check timed out after ${EXEC_TIMEOUTS.PYTHON_VERSION}ms, falling back to system Python`); + } else { + console.warn(`[Python] Warning: CCW_PYTHON="${customPython}" is not a valid Python executable, falling back to system Python`); + } } } @@ -58,12 +71,15 @@ export function getSystemPython(): string { const compatibleVersions = ['3.12', '3.11', '3.10', '3.9']; for (const ver of compatibleVersions) { try { - const version = execSync(`py -${ver} --version 2>&1`, { encoding: 'utf8' }); + const version = execSync(`py -${ver} --version 2>&1`, { encoding: 'utf8', timeout: EXEC_TIMEOUTS.PYTHON_VERSION }); if (version.includes(`Python ${ver}`)) { console.log(`[Python] Found compatible Python ${ver} via py launcher`); return `py -${ver}`; } - } catch { + } catch (err: unknown) { + if (isExecTimeoutError(err)) { + console.warn(`[Python] Warning: py -${ver} version check timed out after ${EXEC_TIMEOUTS.PYTHON_VERSION}ms`); + } // Version not installed, try next } } @@ -75,7 +91,7 @@ export function getSystemPython(): string { for 
(const cmd of commands) { try { - const version = execSync(`${cmd} --version 2>&1`, { encoding: 'utf8' }); + const version = execSync(`${cmd} --version 2>&1`, { encoding: 'utf8', timeout: EXEC_TIMEOUTS.PYTHON_VERSION }); if (version.includes('Python 3')) { const parsed = parsePythonVersion(version); if (parsed) { @@ -90,7 +106,10 @@ export function getSystemPython(): string { } } } - } catch { + } catch (err: unknown) { + if (isExecTimeoutError(err)) { + console.warn(`[Python] Warning: ${cmd} --version timed out after ${EXEC_TIMEOUTS.PYTHON_VERSION}ms`); + } // Try next command } } diff --git a/ccw/src/utils/shell-escape.ts b/ccw/src/utils/shell-escape.ts new file mode 100644 index 00000000..4f52282d --- /dev/null +++ b/ccw/src/utils/shell-escape.ts @@ -0,0 +1,26 @@ +/** + * Windows cmd.exe argument escaping for spawn({ shell: true }). + * + * This utility escapes cmd.exe metacharacters using caret (^) so that user + * controlled input cannot inject additional commands. + */ + +const WINDOWS_METACHARS = /[&|<>()%!"]/g; + +export function escapeWindowsArg(arg: string): string { + if (arg === '') return '""'; + + // Escape caret first to avoid double-escaping when prefixing other metachars. + let escaped = arg.replace(/\^/g, '^^'); + + // Escape cmd.exe metacharacters with caret. + escaped = escaped.replace(WINDOWS_METACHARS, '^$&'); + + // Wrap whitespace-containing args in double quotes. + if (/\s/.test(escaped)) { + escaped = `"${escaped}"`; + } + + return escaped; +} + diff --git a/ccw/tests/auth-routes.test.ts b/ccw/tests/auth-routes.test.ts new file mode 100644 index 00000000..814732ea --- /dev/null +++ b/ccw/tests/auth-routes.test.ts @@ -0,0 +1,148 @@ +/** + * Unit tests for auth routes (ccw/dist/core/routes/auth-routes.js). 
+ */ + +import { afterEach, before, describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import http from 'node:http'; + +type JsonResponse = { + status: number; + json: any; + text: string; + headers: http.IncomingHttpHeaders; +}; + +async function requestJson(baseUrl: string, method: string, reqPath: string, headers?: Record): Promise { + const url = new URL(reqPath, baseUrl); + + return new Promise((resolve, reject) => { + const req = http.request( + url, + { + method, + headers: { Accept: 'application/json', ...(headers ?? {}) }, + }, + (res) => { + let responseBody = ''; + res.on('data', (chunk) => { + responseBody += chunk.toString(); + }); + res.on('end', () => { + let json: any = null; + try { + json = responseBody ? JSON.parse(responseBody) : null; + } catch { + json = null; + } + resolve({ status: res.statusCode || 0, json, text: responseBody, headers: res.headers }); + }); + }, + ); + req.on('error', reject); + req.end(); + }); +} + +function cookiePairsFromSetCookie(setCookie: string | string[] | undefined): string { + if (!setCookie) return ''; + const items = Array.isArray(setCookie) ? 
setCookie : [setCookie]; + const pairs: string[] = []; + for (const item of items) { + const pair = item.split(';')[0]?.trim(); + if (pair) pairs.push(pair); + } + return pairs.join('; '); +} + +async function createServer(): Promise<{ server: http.Server; baseUrl: string }> { + const server = http.createServer(async (req, res) => { + const url = new URL(req.url || '/', 'http://localhost'); + const pathname = url.pathname; + + const ctx = { + pathname, + url, + req, + res, + initialPath: process.cwd(), + handlePostRequest() {}, + broadcastToClients() {}, + }; + + try { + const handled = await authRoutes.handleAuthRoutes(ctx); + if (!handled) { + res.writeHead(404, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Not Found' })); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); + + await new Promise((resolve, reject) => { + server.listen(0, '127.0.0.1', () => resolve()); + server.on('error', reject); + }); + + const address = server.address(); + if (!address || typeof address === 'string') throw new Error('Expected server to listen on a TCP port'); + return { server, baseUrl: `http://127.0.0.1:${address.port}` }; +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let authRoutes: any; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let csrfManager: any; + +describe('auth routes: csrf-token endpoint', async () => { + before(async () => { + authRoutes = await import(new URL('../dist/core/routes/auth-routes.js', import.meta.url).href); + csrfManager = await import(new URL('../dist/core/auth/csrf-manager.js', import.meta.url).href); + }); + + afterEach(() => { + csrfManager.resetCsrfTokenManager(); + }); + + it('GET /api/csrf-token returns token in body, header, and cookie', async () => { + const { server, baseUrl } = await createServer(); + try { + const res = await requestJson(baseUrl, 'GET', 
'/api/csrf-token'); + assert.equal(res.status, 200); + assert.ok(res.json?.csrfToken); + + const token = String(res.json.csrfToken); + assert.match(token, /^[a-f0-9]{64}$/); + assert.equal(res.headers['x-csrf-token'], token); + + const setCookie = res.headers['set-cookie']; + const cookies = Array.isArray(setCookie) ? setCookie.join('\n') : String(setCookie || ''); + assert.ok(cookies.includes('XSRF-TOKEN=')); + assert.ok(cookies.includes('HttpOnly')); + assert.ok(cookies.includes('SameSite=Strict')); + assert.ok(cookies.includes(token)); + } finally { + await new Promise((resolve) => server.close(() => resolve())); + } + }); + + it('GET /api/csrf-token returns a new token per request (same session)', async () => { + const { server, baseUrl } = await createServer(); + try { + const first = await requestJson(baseUrl, 'GET', '/api/csrf-token'); + assert.equal(first.status, 200); + const cookieHeader = cookiePairsFromSetCookie(first.headers['set-cookie']); + assert.ok(cookieHeader.includes('ccw_session_id=')); + + const second = await requestJson(baseUrl, 'GET', '/api/csrf-token', { Cookie: cookieHeader }); + assert.equal(second.status, 200); + + assert.notEqual(first.json.csrfToken, second.json.csrfToken); + } finally { + await new Promise((resolve) => server.close(() => resolve())); + } + }); +}); diff --git a/ccw/tests/cli-command.test.ts b/ccw/tests/cli-command.test.ts index 3e7cf0df..37044fb0 100644 --- a/ccw/tests/cli-command.test.ts +++ b/ccw/tests/cli-command.test.ts @@ -10,9 +10,10 @@ import { after, afterEach, before, describe, it, mock } from 'node:test'; import assert from 'node:assert/strict'; import http from 'node:http'; -import { mkdtempSync, rmSync } from 'node:fs'; +import { existsSync, mkdtempSync, mkdirSync, rmSync, writeFileSync } from 'node:fs'; import { tmpdir } from 'node:os'; import { join } from 'node:path'; +import inquirer from 'inquirer'; const TEST_CCW_HOME = mkdtempSync(join(tmpdir(), 'ccw-cli-command-')); process.env.CCW_DATA_DIR = 
TEST_CCW_HOME; @@ -20,6 +21,7 @@ process.env.CCW_DATA_DIR = TEST_CCW_HOME; const cliCommandPath = new URL('../dist/commands/cli.js', import.meta.url).href; const cliExecutorPath = new URL('../dist/tools/cli-executor.js', import.meta.url).href; const historyStorePath = new URL('../dist/tools/cli-history-store.js', import.meta.url).href; +const storageManagerPath = new URL('../dist/tools/storage-manager.js', import.meta.url).href; function stubHttpRequest(): void { mock.method(http, 'request', () => { @@ -50,11 +52,14 @@ describe('cli command module', async () => { let cliExecutorModule: any; // eslint-disable-next-line @typescript-eslint/no-explicit-any let historyStoreModule: any; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let storageManagerModule: any; before(async () => { cliModule = await import(cliCommandPath); cliExecutorModule = await import(cliExecutorPath); historyStoreModule = await import(historyStorePath); + storageManagerModule = await import(storageManagerPath); }); afterEach(() => { @@ -112,6 +117,117 @@ describe('cli command module', async () => { assert.deepEqual(exitCodes, [0, 0, 0]); }); + it('prints a --file tip when a multi-line prompt is provided via --prompt', async () => { + stubHttpRequest(); + + const logs: string[] = []; + mock.method(console, 'log', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + mock.method(console, 'error', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + + mock.method(cliExecutorModule.cliExecutorTool, 'execute', async () => { + return { + success: true, + stdout: '', + stderr: '', + execution: { id: 'EXEC-ML', duration_ms: 1, status: 'success' }, + conversation: { turn_count: 1, total_duration_ms: 1 }, + }; + }); + + const exitCodes: Array = []; + mock.method(process as any, 'exit', (code?: number) => { + exitCodes.push(code); + }); + + await cliModule.cliCommand('exec', [], { prompt: 'line1\nline2\nline3\nline4', tool: 'gemini', stream: true }); + 
await new Promise((resolve) => setTimeout(resolve, 200)); + + assert.ok(logs.some((l) => l.includes('Tip: Use --file option to avoid shell escaping issues with multi-line prompts'))); + assert.ok(logs.some((l) => l.includes('Example: ccw cli -f prompt.txt --tool gemini'))); + assert.deepEqual(exitCodes, [0]); + }); + + it('does not print the --file tip for single-line prompts', async () => { + stubHttpRequest(); + + const logs: string[] = []; + mock.method(console, 'log', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + mock.method(console, 'error', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + + mock.method(cliExecutorModule.cliExecutorTool, 'execute', async () => { + return { + success: true, + stdout: '', + stderr: '', + execution: { id: 'EXEC-SL', duration_ms: 1, status: 'success' }, + conversation: { turn_count: 1, total_duration_ms: 1 }, + }; + }); + + const exitCodes: Array = []; + mock.method(process as any, 'exit', (code?: number) => { + exitCodes.push(code); + }); + + await cliModule.cliCommand('exec', [], { prompt: 'Hello', tool: 'gemini', stream: true }); + await new Promise((resolve) => setTimeout(resolve, 200)); + + assert.equal( + logs.some((l) => l.includes('Tip: Use --file option to avoid shell escaping issues with multi-line prompts')), + false, + ); + assert.deepEqual(exitCodes, [0]); + }); + + it('prints full output hint immediately after stderr truncation (no troubleshooting duplicate)', async () => { + stubHttpRequest(); + + const logs: string[] = []; + mock.method(console, 'log', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + mock.method(console, 'error', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + + mock.method(cliExecutorModule.cliExecutorTool, 'execute', async () => { + const stderr = Array.from({ length: 31 }, (_, i) => `stderr-line-${i}`).join('\n'); + return { + success: false, + stdout: '', + stderr, + execution: { id: 'EXEC-ERR', 
duration_ms: 12, status: 'error', exit_code: 1 }, + conversation: { turn_count: 1, total_duration_ms: 12 }, + }; + }); + + const exitCodes: Array = []; + mock.method(process as any, 'exit', (code?: number) => { + exitCodes.push(code); + }); + + await cliModule.cliCommand('exec', [], { prompt: 'Hello', tool: 'gemini', stream: true }); + await new Promise((resolve) => setTimeout(resolve, 200)); + + const truncationIndex = logs.findIndex((l) => l.includes('... 1 more lines')); + const hintIndex = logs.findIndex((l) => l.includes('💡 View full output: ccw cli output EXEC-ERR')); + assert.ok(truncationIndex >= 0); + assert.ok(hintIndex >= 0); + assert.equal(hintIndex, truncationIndex + 1); + + assert.equal(logs.filter((l) => l.includes('View full output: ccw cli output EXEC-ERR')).length, 1); + assert.equal(logs.filter((l) => l.includes('• View full output')).length, 0); + assert.deepEqual(exitCodes, [1]); + }); + it('supports resume with conversation ID and latest (no prompt required)', async () => { stubHttpRequest(); mock.method(console, 'log', () => {}); @@ -181,6 +297,100 @@ describe('cli command module', async () => { assert.equal(executed, false); }); + it('shows --file guidance first in help output (multi-line prompts)', async () => { + const logs: string[] = []; + mock.method(console, 'log', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + mock.method(console, 'error', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + + await cliModule.cliCommand('--help', [], {}); + + const usageFileIndex = logs.findIndex((l) => l.includes('ccw cli -f prompt.txt')); + const usagePromptIndex = logs.findIndex((l) => l.includes('ccw cli -p ""')); + assert.ok(usageFileIndex >= 0); + assert.ok(usagePromptIndex >= 0); + assert.ok(usageFileIndex < usagePromptIndex); + + const optionFileIndex = logs.findIndex((l) => l.includes('-f, --file ')); + const optionPromptIndex = logs.findIndex((l) => l.includes('-p, --prompt ')); + 
assert.ok(optionFileIndex >= 0); + assert.ok(optionPromptIndex >= 0); + assert.ok(optionFileIndex < optionPromptIndex); + assert.ok(logs.some((l) => l.includes('Read prompt from file (recommended for multi-line prompts)'))); + + assert.ok(logs.some((l) => l.includes('Examples:'))); + assert.ok(logs.some((l) => l.includes('ccw cli -f my-prompt.txt --tool gemini'))); + assert.ok(logs.some((l) => l.includes("ccw cli -f <(cat <<'EOF'"))); + assert.ok(logs.some((l) => l.includes("@'"))); + assert.ok(logs.some((l) => l.includes('Out-File -Encoding utf8 prompt.tmp; ccw cli -f prompt.tmp --tool gemini'))); + assert.ok(logs.some((l) => l.includes('Tip: For complex prompts, use --file to avoid shell escaping issues'))); + }); + + it('prompts for confirmation before cleaning all storage (and cancels safely)', async () => { + const projectRoot = join(TEST_CCW_HOME, 'projects', 'test-project-cancel'); + const markerDir = join(projectRoot, 'cli-history'); + mkdirSync(markerDir, { recursive: true }); + writeFileSync(join(markerDir, 'dummy.txt'), '1234'); + + const stats = storageManagerModule.getStorageStats(); + const expectedSize = storageManagerModule.formatBytes(stats.totalSize); + + const promptCalls: any[] = []; + mock.method(inquirer, 'prompt', async (questions: any) => { + promptCalls.push(questions); + return { proceed: false }; + }); + + const logs: string[] = []; + mock.method(console, 'log', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + mock.method(console, 'error', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + + await cliModule.cliCommand('storage', ['clean'], { force: false }); + + assert.equal(promptCalls.length, 1); + assert.equal(promptCalls[0][0].type, 'confirm'); + assert.equal(promptCalls[0][0].default, false); + assert.ok(promptCalls[0][0].message.includes(`${stats.projectCount} projects`)); + assert.ok(promptCalls[0][0].message.includes(`(${expectedSize})`)); + + assert.ok(logs.some((l) => l.includes('Storage 
clean cancelled'))); + assert.equal(existsSync(projectRoot), true); + rmSync(projectRoot, { recursive: true, force: true }); + }); + + it('bypasses confirmation prompt when --force is set for storage clean', async () => { + const projectRoot = join(TEST_CCW_HOME, 'projects', 'test-project-force'); + const markerDir = join(projectRoot, 'cli-history'); + mkdirSync(markerDir, { recursive: true }); + writeFileSync(join(markerDir, 'dummy.txt'), '1234'); + + mock.method(inquirer, 'prompt', async () => { + throw new Error('inquirer.prompt should not be called when --force is set'); + }); + + await cliModule.cliCommand('storage', ['clean'], { force: true }); + assert.equal(existsSync(projectRoot), false); + }); + + it('deletes all storage after interactive confirmation', async () => { + const projectRoot = join(TEST_CCW_HOME, 'projects', 'test-project-confirm'); + const markerDir = join(projectRoot, 'cli-history'); + mkdirSync(markerDir, { recursive: true }); + writeFileSync(join(markerDir, 'dummy.txt'), '1234'); + + mock.method(inquirer, 'prompt', async () => ({ proceed: true })); + + await cliModule.cliCommand('storage', ['clean'], { force: false }); + assert.equal(existsSync(projectRoot), false); + }); + it('prints history and retrieves conversation detail from SQLite store', async () => { stubHttpRequest(); diff --git a/ccw/tests/cli-executor-kill.test.ts b/ccw/tests/cli-executor-kill.test.ts new file mode 100644 index 00000000..c36a1ba7 --- /dev/null +++ b/ccw/tests/cli-executor-kill.test.ts @@ -0,0 +1,195 @@ +/** + * Regression tests for killCurrentCliProcess timeout handling (DSC-007). 
+ * + * Focus: + * - Avoid stale SIGKILL timers killing a subsequent child process + * - Ensure SIGKILL is sent when SIGTERM does not terminate the process + */ + +import { after, before, describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { createRequire } from 'node:module'; +import { EventEmitter } from 'node:events'; +import { PassThrough } from 'node:stream'; +import { mkdtempSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const cliExecutorUrl = new URL('../dist/tools/cli-executor.js', import.meta.url).href; +const historyStoreUrl = new URL('../dist/tools/cli-history-store.js', import.meta.url).href; + +type FakeChild = EventEmitter & { + pid?: number; + killed: boolean; + stdin: PassThrough; + stdout: PassThrough; + stderr: PassThrough; + kill: (signal?: string) => boolean; + killCalls: string[]; + close: (code?: number) => void; +}; + +type ToolChildBehavior = { + closeOnSigterm: boolean; +}; + +describe('cli-executor: killCurrentCliProcess regression', async () => { + const require = createRequire(import.meta.url); + const childProcess = require('child_process'); + const originalSpawn = childProcess.spawn; + const originalSetTimeout = globalThis.setTimeout; + + const envSnapshot: Record = {}; + let ccwHome = ''; + let projectDir = ''; + + const toolChildren: FakeChild[] = []; + const plannedBehaviors: ToolChildBehavior[] = []; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let cliExecutorModule: any; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let historyStoreModule: any; + + function unrefFastSetTimeout( + fn: (...args: TArgs) => void, + delay?: number, + ...args: TArgs + ): ReturnType { + const t = originalSetTimeout(fn as (...args: unknown[]) => void, 25, ...args); + (t as unknown as { unref?: () => void }).unref?.(); + return t; + } + + function createFakeChild(behavior: ToolChildBehavior, pid: number): FakeChild { + const 
child = new EventEmitter() as FakeChild; + child.pid = pid; + child.killed = false; + child.stdin = new PassThrough(); + child.stdout = new PassThrough(); + child.stderr = new PassThrough(); + child.killCalls = []; + + let closed = false; + child.close = (code: number = 0) => { + if (closed) return; + closed = true; + child.stdout.end(); + child.stderr.end(); + child.emit('close', code); + }; + + child.kill = (signal?: string) => { + const sig = signal || 'SIGTERM'; + child.killCalls.push(sig); + + if (sig === 'SIGTERM') { + if (behavior.closeOnSigterm) { + child.killed = true; + queueMicrotask(() => child.close(0)); + } + return true; + } + + if (sig === 'SIGKILL') { + child.killed = true; + queueMicrotask(() => child.close(0)); + return true; + } + + return true; + }; + + return child; + } + + before(async () => { + envSnapshot.CCW_DATA_DIR = process.env.CCW_DATA_DIR; + + ccwHome = mkdtempSync(join(tmpdir(), 'ccw-cli-executor-kill-home-')); + projectDir = mkdtempSync(join(tmpdir(), 'ccw-cli-executor-kill-project-')); + process.env.CCW_DATA_DIR = ccwHome; + + globalThis.setTimeout = unrefFastSetTimeout as unknown as typeof setTimeout; + + childProcess.spawn = (command: unknown, args: unknown[], options: Record) => { + const cmd = String(command); + const argv = Array.isArray(args) ? args.map((a) => String(a)) : []; + + // Tool lookup helpers. + if (cmd === 'where' || cmd === 'which') { + const child = createFakeChild({ closeOnSigterm: true }, 4000); + queueMicrotask(() => { + child.stdout.write(`C:\\\\fake\\\\${argv[0] || 'tool'}.cmd\r\n`); + child.close(0); + }); + return child; + } + + const behavior = plannedBehaviors.shift() ?? { closeOnSigterm: true }; + const child = createFakeChild(behavior, 5000 + toolChildren.length); + toolChildren.push(child); + + // Keep the process running until explicitly closed or killed. 
+ return child; + }; + + cliExecutorModule = await import(cliExecutorUrl); + historyStoreModule = await import(historyStoreUrl); + }); + + after(async () => { + childProcess.spawn = originalSpawn; + globalThis.setTimeout = originalSetTimeout; + + try { + historyStoreModule?.closeAllStores?.(); + } catch { + // ignore + } + + if (projectDir) rmSync(projectDir, { recursive: true, force: true }); + if (ccwHome) rmSync(ccwHome, { recursive: true, force: true }); + + process.env.CCW_DATA_DIR = envSnapshot.CCW_DATA_DIR; + }); + + it('does not kill a subsequent child via a stale SIGKILL timeout', async () => { + plannedBehaviors.push({ closeOnSigterm: true }); + plannedBehaviors.push({ closeOnSigterm: false }); + + const run1 = cliExecutorModule.handler({ tool: 'codex', prompt: 'test', cd: projectDir }); + await new Promise((resolve) => setImmediate(resolve)); + + assert.equal(cliExecutorModule.killCurrentCliProcess(), true); + await run1; + + const run2 = cliExecutorModule.handler({ tool: 'codex', prompt: 'test-2', cd: projectDir }); + await new Promise((resolve) => setImmediate(resolve)); + + // Wait long enough for the (patched) kill timeout to fire if not cleared. + await new Promise((resolve) => originalSetTimeout(resolve, 60)); + + assert.equal(toolChildren.length >= 2, true); + assert.deepEqual(toolChildren[1].killCalls, []); + + toolChildren[1].close(0); + await run2; + }); + + it('sends SIGKILL when SIGTERM does not terminate the process', async () => { + plannedBehaviors.push({ closeOnSigterm: false }); + + const run = cliExecutorModule.handler({ tool: 'codex', prompt: 'timeout-test', cd: projectDir }); + await new Promise((resolve) => setImmediate(resolve)); + + assert.equal(cliExecutorModule.killCurrentCliProcess(), true); + // Keep the event loop alive long enough for the (unref'd) timeout to fire. 
+ await new Promise((resolve) => originalSetTimeout(resolve, 60)); + await run; + + assert.equal(toolChildren.length >= 1, true); + assert.ok(toolChildren[toolChildren.length - 1].killCalls.includes('SIGTERM')); + assert.ok(toolChildren[toolChildren.length - 1].killCalls.includes('SIGKILL')); + }); +}); diff --git a/ccw/tests/cli-executor-merge-validation.test.ts b/ccw/tests/cli-executor-merge-validation.test.ts new file mode 100644 index 00000000..cfe1216c --- /dev/null +++ b/ccw/tests/cli-executor-merge-validation.test.ts @@ -0,0 +1,173 @@ +/** + * Regression tests for conversation merge validation (DSC-008). + * + * Focus: + * - Merge with all invalid IDs returns a descriptive error including attempted IDs + * - Merge proceeds when at least one source conversation is valid + */ + +import { after, before, describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { createRequire } from 'node:module'; +import { EventEmitter } from 'node:events'; +import { PassThrough } from 'node:stream'; +import { mkdtempSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const cliExecutorUrl = new URL('../dist/tools/cli-executor.js', import.meta.url).href; +const historyStoreUrl = new URL('../dist/tools/cli-history-store.js', import.meta.url).href; + +type FakeChild = EventEmitter & { + pid?: number; + killed: boolean; + stdin: PassThrough; + stdout: PassThrough; + stderr: PassThrough; + kill: (signal?: string) => boolean; + close: (code?: number) => void; +}; + +function createFakeChild(pid: number): FakeChild { + const child = new EventEmitter() as FakeChild; + child.pid = pid; + child.killed = false; + child.stdin = new PassThrough(); + child.stdout = new PassThrough(); + child.stderr = new PassThrough(); + + let closed = false; + child.close = (code: number = 0) => { + if (closed) return; + closed = true; + child.stdout.end(); + child.stderr.end(); + child.emit('close', code); + }; + + child.kill = 
(signal?: string) => { + child.killed = true; + queueMicrotask(() => child.close(0)); + return true; + }; + + return child; +} + +describe('cli-executor: merge validation regression', async () => { + const require = createRequire(import.meta.url); + const childProcess = require('child_process'); + const originalSpawn = childProcess.spawn; + + const envSnapshot: Record = {}; + let ccwHome = ''; + let projectDir = ''; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let cliExecutorModule: any; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let historyStoreModule: any; + + before(async () => { + envSnapshot.CCW_DATA_DIR = process.env.CCW_DATA_DIR; + + ccwHome = mkdtempSync(join(tmpdir(), 'ccw-cli-executor-merge-home-')); + projectDir = mkdtempSync(join(tmpdir(), 'ccw-cli-executor-merge-project-')); + process.env.CCW_DATA_DIR = ccwHome; + + childProcess.spawn = (command: unknown, args: unknown[]) => { + const cmd = String(command); + const argv = Array.isArray(args) ? args.map((a) => String(a)) : []; + + // Tool lookup helpers. 
+ if (cmd === 'where' || cmd === 'which') { + const child = createFakeChild(4000); + queueMicrotask(() => { + child.stdout.write(`C:\\\\fake\\\\${argv[0] || 'tool'}.cmd\r\n`); + child.close(0); + }); + return child; + } + + const child = createFakeChild(5000); + queueMicrotask(() => { + child.stdout.write('OK\n'); + child.close(0); + }); + return child; + }; + + historyStoreModule = await import(historyStoreUrl); + cliExecutorModule = await import(cliExecutorUrl); + }); + + after(() => { + childProcess.spawn = originalSpawn; + + try { + historyStoreModule?.closeAllStores?.(); + } catch { + // ignore + } + + if (projectDir) rmSync(projectDir, { recursive: true, force: true }); + if (ccwHome) rmSync(ccwHome, { recursive: true, force: true }); + process.env.CCW_DATA_DIR = envSnapshot.CCW_DATA_DIR; + }); + + it('throws a descriptive error when all merge IDs are invalid', async () => { + await assert.rejects( + () => cliExecutorModule.cliExecutorTool.execute({ + tool: 'codex', + prompt: 'test', + cd: projectDir, + resume: 'MISSING-1, MISSING-2' + }), + (err: unknown) => { + assert.ok(err instanceof Error); + assert.ok(err.message.includes('No valid conversations found for merge')); + assert.ok(err.message.includes('MISSING-1')); + assert.ok(err.message.includes('MISSING-2')); + return true; + } + ); + }); + + it('merges when at least one source conversation is valid', async () => { + const store = historyStoreModule.getHistoryStore(projectDir); + store.saveConversation({ + id: 'CONV-MERGE-VALID-1', + created_at: new Date('2025-01-01T00:00:00.000Z').toISOString(), + updated_at: new Date('2025-01-01T00:00:01.000Z').toISOString(), + tool: 'codex', + model: 'default', + mode: 'analysis', + category: 'user', + total_duration_ms: 1, + turn_count: 1, + latest_status: 'success', + turns: [ + { + turn: 1, + timestamp: new Date('2025-01-01T00:00:00.000Z').toISOString(), + prompt: 'Previous prompt', + duration_ms: 1, + status: 'success', + exit_code: 0, + output: { stdout: 
'Previous output', stderr: '', truncated: false, cached: false } + } + ] + }); + + const result = await cliExecutorModule.cliExecutorTool.execute({ + tool: 'codex', + prompt: 'Next prompt', + cd: projectDir, + resume: 'CONV-MERGE-VALID-1, MISSING-99' + }); + + assert.equal(result.success, true); + assert.ok(result.execution?.id); + }); +}); + diff --git a/ccw/tests/cors.test.ts b/ccw/tests/cors.test.ts new file mode 100644 index 00000000..781d65db --- /dev/null +++ b/ccw/tests/cors.test.ts @@ -0,0 +1,31 @@ +/** + * Unit tests for CORS origin validation (ccw/dist/core/cors.js) + */ + +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; + +const corsUrl = new URL('../dist/core/cors.js', import.meta.url).href; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let corsMod: any; + +describe('CORS origin validation', async () => { + corsMod = await import(corsUrl); + + it('allows localhost origins on the server port', () => { + assert.equal(corsMod.validateCorsOrigin('http://localhost:3456', 3456), true); + assert.equal(corsMod.validateCorsOrigin('http://127.0.0.1:3456', 3456), true); + }); + + it('rejects external origins', () => { + assert.equal(corsMod.validateCorsOrigin('http://evil.com', 3456), false); + assert.equal(corsMod.validateCorsOrigin('http://localhost:3457', 3456), false); + }); + + it('defaults missing or rejected Origin to localhost', () => { + assert.equal(corsMod.getCorsOrigin(undefined, 3456), 'http://localhost:3456'); + assert.equal(corsMod.getCorsOrigin('http://evil.com', 3456), 'http://localhost:3456'); + }); +}); + diff --git a/ccw/tests/csrf-manager.test.ts b/ccw/tests/csrf-manager.test.ts new file mode 100644 index 00000000..49a753e2 --- /dev/null +++ b/ccw/tests/csrf-manager.test.ts @@ -0,0 +1,64 @@ +/** + * Unit tests for CsrfTokenManager (ccw/dist/core/auth/csrf-manager.js). + * + * Notes: + * - Targets the runtime implementation shipped in `ccw/dist`. 
+ */ + +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; + +const csrfManagerUrl = new URL('../dist/core/auth/csrf-manager.js', import.meta.url).href; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +describe('CsrfTokenManager', async () => { + mod = await import(csrfManagerUrl); + + it('generateToken produces a 64-character hex token', () => { + const manager = new mod.CsrfTokenManager({ cleanupIntervalMs: 0 }); + const token = manager.generateToken('session-1'); + + assert.match(token, /^[a-f0-9]{64}$/); + manager.dispose(); + }); + + it('validateToken accepts correct session token once', () => { + const manager = new mod.CsrfTokenManager({ cleanupIntervalMs: 0 }); + const token = manager.generateToken('session-1'); + + assert.equal(manager.validateToken(token, 'session-1'), true); + assert.equal(manager.validateToken(token, 'session-1'), false); + manager.dispose(); + }); + + it('validateToken rejects expired tokens', () => { + const manager = new mod.CsrfTokenManager({ tokenTtlMs: -1000, cleanupIntervalMs: 0 }); + const token = manager.generateToken('session-1'); + + assert.equal(manager.validateToken(token, 'session-1'), false); + assert.equal(manager.getActiveTokenCount(), 0); + manager.dispose(); + }); + + it('cleanupExpiredTokens removes expired entries', () => { + const manager = new mod.CsrfTokenManager({ tokenTtlMs: 10, cleanupIntervalMs: 0 }); + manager.generateToken('session-1'); + + const removed = manager.cleanupExpiredTokens(Date.now() + 100); + assert.equal(removed, 1); + assert.equal(manager.getActiveTokenCount(), 0); + manager.dispose(); + }); + + it('session association prevents cross-session token reuse', () => { + const manager = new mod.CsrfTokenManager({ cleanupIntervalMs: 0 }); + const token = manager.generateToken('session-1'); + + assert.equal(manager.validateToken(token, 'session-2'), false); + assert.equal(manager.validateToken(token, 'session-1'), true); + 
manager.dispose(); + }); +}); + diff --git a/ccw/tests/csrf-middleware.test.ts b/ccw/tests/csrf-middleware.test.ts new file mode 100644 index 00000000..47c3dcec --- /dev/null +++ b/ccw/tests/csrf-middleware.test.ts @@ -0,0 +1,153 @@ +/** + * Unit tests for CSRF middleware (ccw/dist/core/auth/csrf-middleware.js) + */ + +import { afterEach, describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { PassThrough } from 'node:stream'; + +type MockResponse = { + status: number | null; + headers: Record; + body: string; + writeHead: (status: number, headers?: Record) => void; + setHeader: (name: string, value: unknown) => void; + getHeader: (name: string) => unknown; + end: (body?: string) => void; +}; + +function createMockRes(): MockResponse { + const headers: Record = {}; + const response: MockResponse = { + status: null, + headers, + body: '', + writeHead: (status: number, nextHeaders?: Record) => { + response.status = status; + if (nextHeaders) { + for (const [k, v] of Object.entries(nextHeaders)) { + headers[k.toLowerCase()] = v; + } + } + }, + setHeader: (name: string, value: unknown) => { + headers[name.toLowerCase()] = value; + }, + getHeader: (name: string) => headers[name.toLowerCase()], + end: (body?: string) => { + response.body = body ? 
String(body) : ''; + }, + }; + return response; +} + +const middlewareUrl = new URL('../dist/core/auth/csrf-middleware.js', import.meta.url); + +const managerUrl = new URL('../dist/core/auth/csrf-manager.js', import.meta.url); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let middleware: any; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let csrfManager: any; + +const ORIGINAL_ENV = { ...process.env }; + +describe('csrf middleware', async () => { + middleware = await import(middlewareUrl.href); + csrfManager = await import(managerUrl.href); + + afterEach(() => { + csrfManager.resetCsrfTokenManager(); + process.env = { ...ORIGINAL_ENV }; + }); + + it('allows non-state-changing requests without tokens', async () => { + const req: any = { method: 'GET', headers: {} }; + const res = createMockRes(); + + const ok = await middleware.csrfValidation({ pathname: '/api/health', req, res }); + assert.equal(ok, true); + assert.equal(res.status, null); + }); + + it('rejects state-changing requests when tokens are missing', async () => { + const req = new PassThrough() as any; + req.method = 'POST'; + req.headers = {}; + const res = createMockRes(); + + const promise = middleware.csrfValidation({ pathname: '/api/remove-recent-path', req, res }); + queueMicrotask(() => { + req.end(); + }); + const ok = await promise; + assert.equal(ok, false); + assert.equal(res.status, 403); + assert.ok(res.body.includes('CSRF validation failed')); + }); + + it('accepts valid CSRF token from cookies and rotates token', async () => { + const sessionId = 'session-1'; + const manager = csrfManager.getCsrfTokenManager({ cleanupIntervalMs: 0 }); + const token = manager.generateToken(sessionId); + + const req: any = { method: 'POST', headers: { cookie: `ccw_session_id=${sessionId}; XSRF-TOKEN=${token}` } }; + const res = createMockRes(); + + const ok = await middleware.csrfValidation({ pathname: '/api/remove-recent-path', req, res }); + assert.equal(ok, true); + 
+ const rotated = res.headers['x-csrf-token']; + assert.ok(typeof rotated === 'string'); + assert.notEqual(rotated, token); + assert.match(rotated, /^[a-f0-9]{64}$/); + + const setCookie = res.headers['set-cookie']; + const cookieString = Array.isArray(setCookie) ? setCookie.join('\n') : String(setCookie ?? ''); + assert.ok(cookieString.includes('XSRF-TOKEN=')); + assert.ok(cookieString.includes(String(rotated))); + }); + + it('rejects token reuse', async () => { + const sessionId = 'session-1'; + const manager = csrfManager.getCsrfTokenManager({ cleanupIntervalMs: 0 }); + const token = manager.generateToken(sessionId); + + const req1: any = { method: 'POST', headers: { cookie: `ccw_session_id=${sessionId}; XSRF-TOKEN=${token}` } }; + const res1 = createMockRes(); + assert.equal(await middleware.csrfValidation({ pathname: '/api/remove-recent-path', req: req1, res: res1 }), true); + + const req2: any = { method: 'POST', headers: { cookie: `ccw_session_id=${sessionId}; XSRF-TOKEN=${token}` } }; + const res2 = createMockRes(); + assert.equal(await middleware.csrfValidation({ pathname: '/api/remove-recent-path', req: req2, res: res2 }), false); + assert.equal(res2.status, 403); + }); + + it('accepts valid CSRF token from JSON body when cookies are absent', async () => { + const sessionId = 'session-1'; + const manager = csrfManager.getCsrfTokenManager({ cleanupIntervalMs: 0 }); + const token = manager.generateToken(sessionId); + + const req = new PassThrough() as any; + req.method = 'POST'; + req.headers = { cookie: `ccw_session_id=${sessionId}` }; + + const res = createMockRes(); + const promise = middleware.csrfValidation({ pathname: '/api/remove-recent-path', req, res }); + queueMicrotask(() => { + req.end(JSON.stringify({ csrfToken: token })); + }); + + const ok = await promise; + assert.equal(ok, true); + }); + + it('skips CSRF validation when CCW_DISABLE_CSRF is enabled', async () => { + process.env.CCW_DISABLE_CSRF = 'true'; + const req: any = { method: 'POST', 
headers: {} }; + const res = createMockRes(); + + const ok = await middleware.csrfValidation({ pathname: '/api/remove-recent-path', req, res }); + assert.equal(ok, true); + }); +}); diff --git a/ccw/tests/files-routes.test.ts b/ccw/tests/files-routes.test.ts new file mode 100644 index 00000000..5872ea9c --- /dev/null +++ b/ccw/tests/files-routes.test.ts @@ -0,0 +1,167 @@ +/** + * Integration tests for files routes path validation. + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Focuses on access control for user-provided file paths. + */ + +import { after, before, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import http from 'node:http'; +import { mkdtempSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const PROJECT_ROOT = mkdtempSync(join(tmpdir(), 'ccw-files-routes-project-')); +const OUTSIDE_ROOT = mkdtempSync(join(tmpdir(), 'ccw-files-routes-outside-')); + +const filesRoutesUrl = new URL('../dist/core/routes/files-routes.js', import.meta.url); +filesRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +type JsonResponse = { status: number; json: any; text: string }; + +async function requestJson(baseUrl: string, method: string, path: string, body?: unknown): Promise { + const url = new URL(path, baseUrl); + const payload = body === undefined ? null : Buffer.from(JSON.stringify(body), 'utf8'); + + return new Promise((resolve, reject) => { + const req = http.request( + url, + { + method, + headers: { + Accept: 'application/json', + ...(payload ? { 'Content-Type': 'application/json', 'Content-Length': String(payload.length) } : {}), + }, + }, + (res) => { + let responseBody = ''; + res.on('data', (chunk) => { + responseBody += chunk.toString(); + }); + res.on('end', () => { + let json: any = null; + try { + json = responseBody ? 
JSON.parse(responseBody) : null; + } catch { + json = null; + } + resolve({ status: res.statusCode || 0, json, text: responseBody }); + }); + }, + ); + req.on('error', reject); + if (payload) req.write(payload); + req.end(); + }); +} + +function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: unknown) => Promise): void { + let body = ''; + req.on('data', (chunk) => { + body += chunk.toString(); + }); + req.on('end', async () => { + try { + const parsed = body ? JSON.parse(body) : {}; + const result = await handler(parsed); + + if (result?.error) { + res.writeHead(result.status || 500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: result.error })); + } else { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(result)); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); +} + +async function createServer(initialPath: string): Promise<{ server: http.Server; baseUrl: string }> { + const server = http.createServer(async (req, res) => { + const url = new URL(req.url || '/', 'http://localhost'); + const pathname = url.pathname; + + const ctx = { + pathname, + url, + req, + res, + initialPath, + handlePostRequest, + broadcastToClients() {}, + }; + + try { + const handled = await mod.handleFilesRoutes(ctx); + if (!handled) { + res.writeHead(404, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Not Found' })); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); + + await new Promise((resolve) => server.listen(0, () => resolve())); + const addr = server.address(); + const port = typeof addr === 'object' && addr ? 
addr.port : 0; + return { server, baseUrl: `http://127.0.0.1:${port}` }; +} + +describe('files routes path validation', async () => { + before(async () => { + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + mod = await import(filesRoutesUrl.href); + }); + + after(() => { + mock.restoreAll(); + rmSync(PROJECT_ROOT, { recursive: true, force: true }); + rmSync(OUTSIDE_ROOT, { recursive: true, force: true }); + }); + + it('GET /api/files rejects paths outside initialPath', async () => { + const { server, baseUrl } = await createServer(PROJECT_ROOT); + try { + const res = await requestJson(baseUrl, 'GET', `/api/files?path=${encodeURIComponent(OUTSIDE_ROOT)}`); + assert.equal(res.status, 403); + assert.equal(res.json.error, 'Access denied'); + assert.equal(Array.isArray(res.json.files), true); + } finally { + await new Promise((resolve) => server.close(() => resolve())); + } + }); + + it('GET /api/file-content rejects paths outside initialPath', async () => { + const { server, baseUrl } = await createServer(PROJECT_ROOT); + try { + const res = await requestJson(baseUrl, 'GET', `/api/file-content?path=${encodeURIComponent(join(OUTSIDE_ROOT, 'secret.txt'))}`); + assert.equal(res.status, 403); + assert.equal(res.json.error, 'Access denied'); + } finally { + await new Promise((resolve) => server.close(() => resolve())); + } + }); + + it('POST /api/update-claude-md rejects paths outside initialPath', async () => { + const { server, baseUrl } = await createServer(PROJECT_ROOT); + try { + const res = await requestJson(baseUrl, 'POST', '/api/update-claude-md', { path: OUTSIDE_ROOT, tool: 'gemini', strategy: 'single-layer' }); + assert.equal(res.status, 403); + assert.equal(res.json.error, 'Access denied'); + } finally { + await new Promise((resolve) => server.close(() => resolve())); + } + }); +}); + diff --git a/ccw/tests/graph-routes.test.ts b/ccw/tests/graph-routes.test.ts new file mode 100644 index 00000000..ea4ffe8d --- /dev/null +++ 
b/ccw/tests/graph-routes.test.ts @@ -0,0 +1,145 @@ +/** + * Integration tests for graph routes path validation. + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Focuses on path validation behavior (rejects paths outside initialPath). + */ + +import { after, before, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import http from 'node:http'; +import { mkdtempSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const PROJECT_ROOT = mkdtempSync(join(tmpdir(), 'ccw-graph-routes-project-')); +const OUTSIDE_ROOT = mkdtempSync(join(tmpdir(), 'ccw-graph-routes-outside-')); + +const graphRoutesUrl = new URL('../dist/core/routes/graph-routes.js', import.meta.url); +graphRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +type JsonResponse = { status: number; json: any; text: string }; + +async function requestJson(baseUrl: string, method: string, path: string, body?: unknown): Promise { + const url = new URL(path, baseUrl); + const payload = body === undefined ? null : Buffer.from(JSON.stringify(body), 'utf8'); + + return new Promise((resolve, reject) => { + const req = http.request( + url, + { + method, + headers: { + Accept: 'application/json', + ...(payload ? { 'Content-Type': 'application/json', 'Content-Length': String(payload.length) } : {}), + }, + }, + (res) => { + let responseBody = ''; + res.on('data', (chunk) => { + responseBody += chunk.toString(); + }); + res.on('end', () => { + let json: any = null; + try { + json = responseBody ? 
JSON.parse(responseBody) : null; + } catch { + json = null; + } + resolve({ status: res.statusCode || 0, json, text: responseBody }); + }); + }, + ); + req.on('error', reject); + if (payload) req.write(payload); + req.end(); + }); +} + +function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: unknown) => Promise): void { + let body = ''; + req.on('data', (chunk) => { + body += chunk.toString(); + }); + req.on('end', async () => { + try { + const parsed = body ? JSON.parse(body) : {}; + const result = await handler(parsed); + + if (result?.error) { + res.writeHead(result.status || 500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: result.error })); + } else { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(result)); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); +} + +async function createServer(initialPath: string): Promise<{ server: http.Server; baseUrl: string }> { + const server = http.createServer(async (req, res) => { + const url = new URL(req.url || '/', 'http://localhost'); + const pathname = url.pathname; + + const ctx = { + pathname, + url, + req, + res, + initialPath, + handlePostRequest, + broadcastToClients() {}, + }; + + try { + const handled = await mod.handleGraphRoutes(ctx); + if (!handled) { + res.writeHead(404, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Not Found' })); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); + + await new Promise((resolve) => server.listen(0, () => resolve())); + const addr = server.address(); + const port = typeof addr === 'object' && addr ? 
addr.port : 0; + return { server, baseUrl: `http://127.0.0.1:${port}` }; +} + +describe('graph routes path validation', async () => { + before(async () => { + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + mod = await import(graphRoutesUrl.href); + }); + + after(() => { + mock.restoreAll(); + rmSync(PROJECT_ROOT, { recursive: true, force: true }); + rmSync(OUTSIDE_ROOT, { recursive: true, force: true }); + }); + + it('GET /api/graph/nodes rejects paths outside initialPath', async () => { + const { server, baseUrl } = await createServer(PROJECT_ROOT); + try { + const res = await requestJson(baseUrl, 'GET', `/api/graph/nodes?path=${encodeURIComponent(OUTSIDE_ROOT)}`); + assert.equal(res.status, 403); + assert.equal(res.json.error, 'Access denied'); + assert.equal(Array.isArray(res.json.nodes), true); + } finally { + await new Promise((resolve) => server.close(() => resolve())); + } + }); +}); + diff --git a/ccw/tests/integration/ccw-routes.test.ts b/ccw/tests/integration/ccw-routes.test.ts new file mode 100644 index 00000000..438f78de --- /dev/null +++ b/ccw/tests/integration/ccw-routes.test.ts @@ -0,0 +1,146 @@ +/** + * Integration tests for CCW routes (installations/tools). + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Exercises real HTTP request/response flow via a minimal test server. 
+ */ + +import { after, before, describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import http from 'node:http'; + +const ccwRoutesUrl = new URL('../../dist/core/routes/ccw-routes.js', import.meta.url); +ccwRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +type JsonResponse = { status: number; json: any; text: string }; + +async function requestJson(baseUrl: string, method: string, path: string): Promise { + const url = new URL(path, baseUrl); + + return new Promise((resolve, reject) => { + const req = http.request( + url, + { method, headers: { Accept: 'application/json' } }, + (res) => { + let body = ''; + res.on('data', (chunk) => { + body += chunk.toString(); + }); + res.on('end', () => { + let json: any = null; + try { + json = body ? JSON.parse(body) : null; + } catch { + json = null; + } + resolve({ status: res.statusCode || 0, json, text: body }); + }); + }, + ); + req.on('error', reject); + req.end(); + }); +} + +function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: unknown) => Promise): void { + let body = ''; + req.on('data', (chunk) => { + body += chunk.toString(); + }); + req.on('end', async () => { + try { + const parsed = body ? 
JSON.parse(body) : {}; + const result = await handler(parsed); + + if (result?.error) { + res.writeHead(result.status || 500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: result.error })); + } else { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(result)); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); +} + +describe('ccw routes integration', async () => { + let server: http.Server | null = null; + let baseUrl = ''; + + before(async () => { + mod = await import(ccwRoutesUrl.href); + + server = http.createServer(async (req, res) => { + const url = new URL(req.url || '/', 'http://localhost'); + const pathname = url.pathname; + + const ctx = { + pathname, + url, + req, + res, + initialPath: process.cwd(), + handlePostRequest, + broadcastToClients() {}, + }; + + try { + const handled = await mod.handleCcwRoutes(ctx); + if (!handled) { + res.writeHead(404, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Not Found' })); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); + + await new Promise((resolve) => { + server!.listen(0, () => resolve()); + }); + + const addr = server.address(); + const port = typeof addr === 'object' && addr ? 
addr.port : 0; + baseUrl = `http://127.0.0.1:${port}`; + }); + + after(async () => { + if (!server) return; + await new Promise((resolve) => server!.close(() => resolve())); + }); + + it('GET /api/ccw/installations returns installation manifests', async () => { + const res = await requestJson(baseUrl, 'GET', '/api/ccw/installations'); + assert.equal(res.status, 200); + assert.ok(res.json); + assert.equal(Array.isArray(res.json.installations), true); + }); + + it('GET /api/ccw/tools returns available tools', async () => { + const res = await requestJson(baseUrl, 'GET', '/api/ccw/tools'); + assert.equal(res.status, 200); + assert.ok(res.json); + assert.equal(Array.isArray(res.json.tools), true); + }); + + it('GET /api/ccw/upgrade returns 404 (POST-only endpoint)', async () => { + const res = await requestJson(baseUrl, 'GET', '/api/ccw/upgrade'); + assert.equal(res.status, 404); + assert.ok(res.json?.error); + }); + + it('returns 404 for unknown /api/ccw/* routes', async () => { + const res = await requestJson(baseUrl, 'GET', '/api/ccw/nope'); + assert.equal(res.status, 404); + assert.ok(res.json?.error); + }); +}); + diff --git a/ccw/tests/integration/claude-routes.test.ts b/ccw/tests/integration/claude-routes.test.ts new file mode 100644 index 00000000..0a99be26 --- /dev/null +++ b/ccw/tests/integration/claude-routes.test.ts @@ -0,0 +1,272 @@ +/** + * Integration tests for CLAUDE.md routes (scan + CRUD). + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Uses temporary HOME/USERPROFILE to isolate user-level files. + * - Uses a temporary project root as initialPath for project/module operations. 
+ */ + +import { after, before, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import http from 'node:http'; +import { existsSync, mkdirSync, mkdtempSync, readdirSync, readFileSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const claudeRoutesUrl = new URL('../../dist/core/routes/claude-routes.js', import.meta.url); +claudeRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +const originalEnv = { + HOME: process.env.HOME, + USERPROFILE: process.env.USERPROFILE, + HOMEDRIVE: process.env.HOMEDRIVE, + HOMEPATH: process.env.HOMEPATH, +}; + +type JsonResponse = { status: number; json: any; text: string }; + +async function requestJson( + baseUrl: string, + method: string, + path: string, + body?: unknown, +): Promise { + const url = new URL(path, baseUrl); + const payload = body === undefined ? null : Buffer.from(JSON.stringify(body), 'utf8'); + + return new Promise((resolve, reject) => { + const req = http.request( + url, + { + method, + headers: { + Accept: 'application/json', + ...(payload + ? { 'Content-Type': 'application/json', 'Content-Length': String(payload.length) } + : {}), + }, + }, + (res) => { + let responseBody = ''; + res.on('data', (chunk) => { + responseBody += chunk.toString(); + }); + res.on('end', () => { + let json: any = null; + try { + json = responseBody ? JSON.parse(responseBody) : null; + } catch { + json = null; + } + resolve({ status: res.statusCode || 0, json, text: responseBody }); + }); + }, + ); + req.on('error', reject); + if (payload) req.write(payload); + req.end(); + }); +} + +function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: unknown) => Promise): void { + let body = ''; + req.on('data', (chunk) => { + body += chunk.toString(); + }); + req.on('end', async () => { + try { + const parsed = body ? 
JSON.parse(body) : {}; + const result = await handler(parsed); + + if (result?.error) { + res.writeHead(result.status || 500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: result.error })); + } else { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(result)); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); +} + +describe('claude routes integration', async () => { + let server: http.Server | null = null; + let baseUrl = ''; + let homeDir = ''; + let projectRoot = ''; + + before(async () => { + homeDir = mkdtempSync(join(tmpdir(), 'ccw-claude-home-')); + projectRoot = mkdtempSync(join(tmpdir(), 'ccw-claude-project-')); + + process.env.HOME = homeDir; + process.env.USERPROFILE = homeDir; + process.env.HOMEDRIVE = undefined; + process.env.HOMEPATH = undefined; + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + mod = await import(claudeRoutesUrl.href); + + server = http.createServer(async (req, res) => { + const url = new URL(req.url || '/', 'http://localhost'); + const pathname = url.pathname; + + const ctx = { + pathname, + url, + req, + res, + initialPath: projectRoot, + handlePostRequest, + broadcastToClients() {}, + }; + + try { + const handled = await mod.handleClaudeRoutes(ctx); + if (!handled) { + res.writeHead(404, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Not Found' })); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); + + await new Promise((resolve) => server!.listen(0, () => resolve())); + const addr = server.address(); + const port = typeof addr === 'object' && addr ? 
addr.port : 0; + baseUrl = `http://127.0.0.1:${port}`; + }); + + after(async () => { + mock.restoreAll(); + process.env.HOME = originalEnv.HOME; + process.env.USERPROFILE = originalEnv.USERPROFILE; + process.env.HOMEDRIVE = originalEnv.HOMEDRIVE; + process.env.HOMEPATH = originalEnv.HOMEPATH; + + if (server) { + await new Promise((resolve) => server!.close(() => resolve())); + server = null; + } + + if (projectRoot) { + rmSync(projectRoot, { recursive: true, force: true }); + projectRoot = ''; + } + + if (homeDir) { + rmSync(homeDir, { recursive: true, force: true }); + homeDir = ''; + } + }); + + it('POST /api/memory/claude/create creates a project-level CLAUDE.md', async () => { + const res = await requestJson(baseUrl, 'POST', '/api/memory/claude/create', { level: 'project', template: 'minimal' }); + assert.equal(res.status, 200); + assert.equal(res.json?.success, true); + assert.ok(typeof res.json.path === 'string' && res.json.path.endsWith('CLAUDE.md')); + assert.equal(existsSync(res.json.path), true); + }); + + it('GET /api/memory/claude/file parses frontmatter for project CLAUDE.md', async () => { + const claudePath = join(projectRoot, '.claude', 'CLAUDE.md'); + mkdirSync(join(projectRoot, '.claude'), { recursive: true }); + writeFileSync( + claudePath, + ['---', 'paths: [src, docs]', '---', '', '# Project Rules', '', 'ok'].join('\n'), + 'utf8', + ); + + const res = await requestJson(baseUrl, 'GET', `/api/memory/claude/file?path=${encodeURIComponent(claudePath)}`); + assert.equal(res.status, 200); + assert.equal(res.json.level, 'project'); + assert.deepEqual(res.json.frontmatter?.paths, ['src', 'docs']); + assert.match(res.json.content, /# Project Rules/); + assert.equal(String(res.json.content).includes('paths:'), false); + }); + + it('POST /api/memory/claude/file saves updated content', async () => { + const claudePath = join(projectRoot, '.claude', 'CLAUDE.md'); + mkdirSync(join(projectRoot, '.claude'), { recursive: true }); + writeFileSync(claudePath, 
'before\n', 'utf8'); + + const res = await requestJson(baseUrl, 'POST', '/api/memory/claude/file', { path: claudePath, content: 'after\n' }); + assert.equal(res.status, 200); + assert.equal(res.json?.success, true); + assert.equal(readFileSync(claudePath, 'utf8'), 'after\n'); + }); + + it('GET /api/memory/claude/scan separates user/project/module levels', async () => { + const userClaudePath = join(homeDir, '.claude', 'CLAUDE.md'); + mkdirSync(join(homeDir, '.claude'), { recursive: true }); + writeFileSync(userClaudePath, '# User CLAUDE\n', 'utf8'); + + const projectClaudePath = join(projectRoot, '.claude', 'CLAUDE.md'); + mkdirSync(join(projectRoot, '.claude'), { recursive: true }); + writeFileSync(projectClaudePath, ['---', 'paths: [src]', '---', '', '# Project CLAUDE'].join('\n'), 'utf8'); + + const moduleDir = join(projectRoot, 'module-a'); + mkdirSync(moduleDir, { recursive: true }); + writeFileSync(join(moduleDir, 'CLAUDE.md'), '# Module CLAUDE\n', 'utf8'); + + const res = await requestJson(baseUrl, 'GET', `/api/memory/claude/scan?path=${encodeURIComponent(projectRoot)}`); + assert.equal(res.status, 200); + assert.equal(res.json.user?.main?.level, 'user'); + assert.ok(String(res.json.user.main.path).includes(homeDir)); + + assert.equal(res.json.project?.main?.level, 'project'); + assert.ok(String(res.json.project.main.path).includes(projectRoot)); + assert.deepEqual(res.json.project.main.frontmatter?.paths, ['src']); + assert.equal(String(res.json.project.main.content).includes('paths:'), false); + + assert.equal(Array.isArray(res.json.modules), true); + assert.ok(res.json.modules.length >= 1); + const moduleFile = res.json.modules.find((m: any) => String(m.path).includes('module-a')); + assert.ok(moduleFile); + assert.equal(moduleFile.level, 'module'); + assert.equal(moduleFile.parentDirectory, 'module-a'); + }); + + it('DELETE /api/memory/claude/file requires confirm=true', async () => { + const moduleDir = join(projectRoot, 'module-del'); + const 
moduleFilePath = join(moduleDir, 'CLAUDE.md'); + mkdirSync(moduleDir, { recursive: true }); + writeFileSync(moduleFilePath, '# To delete\n', 'utf8'); + + const res = await requestJson(baseUrl, 'DELETE', `/api/memory/claude/file?path=${encodeURIComponent(moduleFilePath)}`); + assert.equal(res.status, 400); + assert.equal(res.json?.error, 'Confirmation required'); + assert.equal(existsSync(moduleFilePath), true); + }); + + it('DELETE /api/memory/claude/file deletes the file and creates a backup', async () => { + const moduleDir = join(projectRoot, 'module-del-ok'); + const moduleFilePath = join(moduleDir, 'CLAUDE.md'); + mkdirSync(moduleDir, { recursive: true }); + writeFileSync(moduleFilePath, '# Bye\n', 'utf8'); + + const res = await requestJson( + baseUrl, + 'DELETE', + `/api/memory/claude/file?path=${encodeURIComponent(moduleFilePath)}&confirm=true`, + ); + assert.equal(res.status, 200); + assert.equal(res.json?.success, true); + assert.equal(existsSync(moduleFilePath), false); + + const backups = readdirSync(moduleDir).filter((name) => name.startsWith('CLAUDE.md.deleted-')); + assert.equal(backups.length, 1); + }); +}); + diff --git a/ccw/tests/integration/files-routes.test.ts b/ccw/tests/integration/files-routes.test.ts new file mode 100644 index 00000000..c7cde2be --- /dev/null +++ b/ccw/tests/integration/files-routes.test.ts @@ -0,0 +1,206 @@ +/** + * Integration tests for files routes (directory listing + file preview). + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Uses a temporary project directory as the allowed root (initialPath). 
+ */ + +import { after, before, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import http from 'node:http'; +import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const filesRoutesUrl = new URL('../../dist/core/routes/files-routes.js', import.meta.url); +filesRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +type JsonResponse = { status: number; json: any; text: string }; + +async function requestJson(baseUrl: string, method: string, path: string): Promise { + const url = new URL(path, baseUrl); + + return new Promise((resolve, reject) => { + const req = http.request( + url, + { method, headers: { Accept: 'application/json' } }, + (res) => { + let body = ''; + res.on('data', (chunk) => { + body += chunk.toString(); + }); + res.on('end', () => { + let json: any = null; + try { + json = body ? JSON.parse(body) : null; + } catch { + json = null; + } + resolve({ status: res.statusCode || 0, json, text: body }); + }); + }, + ); + req.on('error', reject); + req.end(); + }); +} + +function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: unknown) => Promise): void { + let body = ''; + req.on('data', (chunk) => { + body += chunk.toString(); + }); + req.on('end', async () => { + try { + const parsed = body ? 
JSON.parse(body) : {}; + const result = await handler(parsed); + + if (result?.error) { + res.writeHead(result.status || 500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: result.error })); + } else { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(result)); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); +} + +describe('files routes integration', async () => { + let server: http.Server | null = null; + let baseUrl = ''; + let projectRoot = ''; + + before(async () => { + projectRoot = mkdtempSync(join(tmpdir(), 'ccw-files-routes-project-')); + + mkdirSync(join(projectRoot, 'subdir'), { recursive: true }); + mkdirSync(join(projectRoot, '.claude'), { recursive: true }); + mkdirSync(join(projectRoot, '.workflow'), { recursive: true }); + mkdirSync(join(projectRoot, 'node_modules'), { recursive: true }); + mkdirSync(join(projectRoot, 'ignored-dir'), { recursive: true }); + + writeFileSync(join(projectRoot, 'visible.txt'), 'ok\n', 'utf8'); + writeFileSync(join(projectRoot, 'ignored.txt'), 'nope\n', 'utf8'); + writeFileSync(join(projectRoot, '.secret'), 'hidden\n', 'utf8'); + writeFileSync(join(projectRoot, 'readme.md'), '# Hello\n', 'utf8'); + writeFileSync(join(projectRoot, '.gitignore'), ['ignored.txt', 'ignored-dir/'].join('\n') + '\n', 'utf8'); + + mock.method(console, 'error', () => {}); + mod = await import(filesRoutesUrl.href); + + server = http.createServer(async (req, res) => { + const url = new URL(req.url || '/', 'http://localhost'); + const pathname = url.pathname; + + const ctx = { + pathname, + url, + req, + res, + initialPath: projectRoot, + handlePostRequest, + }; + + try { + const handled = await mod.handleFilesRoutes(ctx); + if (!handled) { + res.writeHead(404, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Not Found' })); + } + } catch 
(err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); + + await new Promise((resolve) => { + server!.listen(0, () => resolve()); + }); + + const addr = server.address(); + const port = typeof addr === 'object' && addr ? addr.port : 0; + baseUrl = `http://127.0.0.1:${port}`; + }); + + after(async () => { + mock.restoreAll(); + if (server) { + await new Promise((resolve) => server!.close(() => resolve())); + server = null; + } + if (projectRoot) { + rmSync(projectRoot, { recursive: true, force: true }); + projectRoot = ''; + } + }); + + it('GET /api/files lists entries and respects gitignore/exclude rules', async () => { + const res = await requestJson(baseUrl, 'GET', `/api/files?path=${encodeURIComponent(projectRoot)}`); + assert.equal(res.status, 200); + assert.ok(res.json); + assert.equal(Array.isArray(res.json.files), true); + + const names = res.json.files.map((f: any) => f.name); + assert.ok(names.includes('subdir')); + assert.ok(names.includes('visible.txt')); + assert.ok(names.includes('.claude')); + assert.ok(names.includes('.workflow')); + + // Hidden dotfiles (except .claude/.workflow) are excluded. + assert.equal(names.includes('.secret'), false); + // Common excluded dirs are always removed. + assert.equal(names.includes('node_modules'), false); + // .gitignore patterns should be enforced. 
+ assert.equal(names.includes('ignored.txt'), false); + assert.equal(names.includes('ignored-dir'), false); + assert.equal(Array.isArray(res.json.gitignorePatterns), true); + assert.ok(res.json.gitignorePatterns.includes('ignored.txt')); + }); + + it('GET /api/files returns 400 for non-existent path', async () => { + const missing = join(projectRoot, 'missing-dir'); + const res = await requestJson(baseUrl, 'GET', `/api/files?path=${encodeURIComponent(missing)}`); + assert.equal(res.status, 400); + assert.equal(res.json?.error, 'Invalid path'); + assert.equal(Array.isArray(res.json?.files), true); + assert.equal(res.json.files.length, 0); + }); + + it('GET /api/files blocks traversal outside initialPath', async () => { + const outside = join(projectRoot, '..'); + const res = await requestJson(baseUrl, 'GET', `/api/files?path=${encodeURIComponent(outside)}`); + assert.equal(res.status, 403); + assert.equal(res.json?.error, 'Access denied'); + }); + + it('GET /api/file-content returns preview content for files', async () => { + const target = join(projectRoot, 'readme.md'); + const res = await requestJson(baseUrl, 'GET', `/api/file-content?path=${encodeURIComponent(target)}`); + assert.equal(res.status, 200); + assert.ok(res.json); + assert.equal(res.json.fileName, 'readme.md'); + assert.equal(res.json.language, 'markdown'); + assert.equal(res.json.isMarkdown, true); + assert.ok(String(res.json.content).includes('# Hello')); + }); + + it('GET /api/file-content returns 400 when path is missing', async () => { + const res = await requestJson(baseUrl, 'GET', '/api/file-content'); + assert.equal(res.status, 400); + assert.ok(res.json?.error); + }); + + it('GET /api/file-content returns 404 when path is a directory', async () => { + const res = await requestJson(baseUrl, 'GET', `/api/file-content?path=${encodeURIComponent(projectRoot)}`); + assert.equal(res.status, 404); + assert.equal(res.json?.error, 'Cannot read directory'); + }); +}); diff --git 
a/ccw/tests/integration/graph-routes.test.ts b/ccw/tests/integration/graph-routes.test.ts new file mode 100644 index 00000000..f8d36b85 --- /dev/null +++ b/ccw/tests/integration/graph-routes.test.ts @@ -0,0 +1,93 @@ +/** + * Integration tests for graph routes (CodexLens graph API helpers). + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Calls route handler directly (no HTTP server required). + */ +import { after, before, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtempSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const graphRoutesUrl = new URL('../../dist/core/routes/graph-routes.js', import.meta.url); +graphRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +async function callGraph( + projectRoot: string, + path: string, +): Promise<{ handled: boolean; status: number; json: any }> { + const url = new URL(path, 'http://localhost'); + let status = 0; + let body = ''; + + const res = { + writeHead(code: number) { + status = code; + }, + end(chunk?: any) { + body = chunk === undefined ? '' : String(chunk); + }, + }; + + const handled = await mod.handleGraphRoutes({ + pathname: url.pathname, + url, + req: { method: 'GET' }, + res, + initialPath: projectRoot, + }); + + return { handled, status, json: body ? 
JSON.parse(body) : null }; +} + +describe('graph routes integration', async () => { + let projectRoot = ''; + + before(async () => { + projectRoot = mkdtempSync(join(tmpdir(), 'ccw-graph-project-')); + mock.method(console, 'error', () => {}); + mod = await import(graphRoutesUrl.href); + }); + + after(() => { + mock.restoreAll(); + if (projectRoot) { + rmSync(projectRoot, { recursive: true, force: true }); + projectRoot = ''; + } + }); + + it('GET /api/graph/search-process returns placeholder pipeline data', async () => { + const res = await callGraph(projectRoot, '/api/graph/search-process'); + assert.equal(res.handled, true); + assert.equal(res.status, 200); + assert.equal(Array.isArray(res.json.stages), true); + assert.equal(res.json.stages.length, 5); + assert.equal(typeof res.json.message, 'string'); + }); + + it('GET /api/graph/files returns empty lists when no index exists', async () => { + const res = await callGraph(projectRoot, `/api/graph/files?path=${encodeURIComponent(projectRoot)}`); + assert.equal(res.handled, true); + assert.equal(res.status, 200); + assert.equal(Array.isArray(res.json.files), true); + assert.equal(Array.isArray(res.json.modules), true); + assert.equal(res.json.files.length, 0); + assert.equal(res.json.modules.length, 0); + }); + + it('GET /api/graph/impact validates required symbol parameter', async () => { + const res = await callGraph(projectRoot, `/api/graph/impact?path=${encodeURIComponent(projectRoot)}`); + assert.equal(res.handled, true); + assert.equal(res.status, 400); + assert.ok(String(res.json.error).includes('symbol')); + assert.equal(Array.isArray(res.json.directDependents), true); + assert.equal(Array.isArray(res.json.affectedFiles), true); + }); +}); + diff --git a/ccw/tests/integration/help-routes.test.ts b/ccw/tests/integration/help-routes.test.ts new file mode 100644 index 00000000..27328d80 --- /dev/null +++ b/ccw/tests/integration/help-routes.test.ts @@ -0,0 +1,174 @@ +/** + * Integration tests for help routes 
(command guide + CodexLens docs). + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Avoids spinning up a real HTTP server; calls route handler directly. + * - Uses a temporary HOME/USERPROFILE to isolate ~/.claude/skills/command-guide/index data. + */ + +import { after, before, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const helpRoutesUrl = new URL('../../dist/core/routes/help-routes.js', import.meta.url); +helpRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +const originalEnv = { + HOME: process.env.HOME, + USERPROFILE: process.env.USERPROFILE, + HOMEDRIVE: process.env.HOMEDRIVE, + HOMEPATH: process.env.HOMEPATH, +}; + +async function callRoute(path: string): Promise<{ handled: boolean; status: number; json: any; text: string }> { + const url = new URL(path, 'http://localhost'); + let status = 0; + let text = ''; + + const res = { + writeHead(code: number) { + status = code; + }, + end(chunk?: any) { + text = chunk === undefined ? '' : String(chunk); + }, + }; + + const ctx = { + pathname: url.pathname, + url, + req: { method: 'GET' }, + res, + }; + + const handled = await mod.handleHelpRoutes(ctx); + + let json: any = null; + try { + json = text ? 
JSON.parse(text) : null; + } catch { + json = null; + } + + return { handled, status, json, text }; +} + +describe('help routes integration', async () => { + let homeDir = ''; + + before(async () => { + homeDir = mkdtempSync(join(tmpdir(), 'ccw-help-home-')); + process.env.HOME = homeDir; + process.env.USERPROFILE = homeDir; + process.env.HOMEDRIVE = undefined; + process.env.HOMEPATH = undefined; + + mock.method(console, 'log', () => {}); + mock.method(console, 'warn', () => {}); + mock.method(console, 'error', () => {}); + + const indexDir = join(homeDir, '.claude', 'skills', 'command-guide', 'index'); + mkdirSync(indexDir, { recursive: true }); + + writeFileSync( + join(indexDir, 'all-commands.json'), + JSON.stringify( + [ + { name: 'Issue Next', command: 'ccw issue next', description: 'Fetch next item', category: 'issue', subcategory: 'queue' }, + { name: 'Serve', command: 'ccw serve', description: 'Start dashboard server', category: 'core' }, + ], + null, + 2, + ), + 'utf8', + ); + + writeFileSync( + join(indexDir, 'command-relationships.json'), + JSON.stringify({ workflows: [{ name: 'Issue Queue', commands: ['ccw issue next', 'ccw issue done'] }] }, null, 2), + 'utf8', + ); + + writeFileSync( + join(indexDir, 'by-category.json'), + JSON.stringify({ issue: ['ccw issue next'], core: ['ccw serve'] }, null, 2), + 'utf8', + ); + + mod = await import(helpRoutesUrl.href); + }); + + after(() => { + mock.restoreAll(); + process.env.HOME = originalEnv.HOME; + process.env.USERPROFILE = originalEnv.USERPROFILE; + process.env.HOMEDRIVE = originalEnv.HOMEDRIVE; + process.env.HOMEPATH = originalEnv.HOMEPATH; + + const activeHandles: any[] = (process as any)._getActiveHandles?.() || []; + for (const handle of activeHandles) { + if (handle?.constructor?.name === 'FSWatcher' && typeof handle.close === 'function') { + try { + handle.close(); + } catch { + // ignore + } + } + } + + if (homeDir) { + rmSync(homeDir, { recursive: true, force: true }); + homeDir = ''; + } + }); + + 
it('GET /api/help/commands returns commands and grouped categories', async () => { + const res = await callRoute('/api/help/commands'); + assert.equal(res.handled, true); + assert.equal(res.status, 200); + assert.equal(Array.isArray(res.json.commands), true); + assert.equal(res.json.total, 2); + assert.equal(typeof res.json.grouped, 'object'); + assert.ok(res.json.grouped.issue); + }); + + it('GET /api/help/commands?q filters commands by search query', async () => { + const res = await callRoute('/api/help/commands?q=issue'); + assert.equal(res.handled, true); + assert.equal(res.status, 200); + assert.equal(res.json.total, 1); + assert.equal(res.json.commands[0].command, 'ccw issue next'); + }); + + it('GET /api/help/workflows returns workflow relationships data', async () => { + const res = await callRoute('/api/help/workflows'); + assert.equal(res.handled, true); + assert.equal(res.status, 200); + assert.equal(Array.isArray(res.json.workflows), true); + assert.equal(res.json.workflows[0].name, 'Issue Queue'); + }); + + it('GET /api/help/commands/by-category returns category index data', async () => { + const res = await callRoute('/api/help/commands/by-category'); + assert.equal(res.handled, true); + assert.equal(res.status, 200); + assert.equal(Array.isArray(res.json.issue), true); + assert.equal(res.json.issue[0], 'ccw issue next'); + }); + + it('GET /api/help/codexlens returns CodexLens quick start content', async () => { + const res = await callRoute('/api/help/codexlens'); + assert.equal(res.handled, true); + assert.equal(res.status, 200); + assert.equal(res.json.title, 'CodexLens Quick Start'); + assert.equal(Array.isArray(res.json.sections), true); + assert.ok(res.json.sections.length > 0); + }); +}); + diff --git a/ccw/tests/integration/hooks-routes.test.ts b/ccw/tests/integration/hooks-routes.test.ts new file mode 100644 index 00000000..f69349ab --- /dev/null +++ b/ccw/tests/integration/hooks-routes.test.ts @@ -0,0 +1,159 @@ +/** + * Integration tests 
for hooks routes (hooks configuration CRUD). + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Uses temporary HOME/USERPROFILE for global settings isolation. + * - Calls route handler directly (no HTTP server required). + */ + +import { after, before, beforeEach, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtempSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const hooksRoutesUrl = new URL('../../dist/core/routes/hooks-routes.js', import.meta.url); +hooksRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +const originalEnv = { + HOME: process.env.HOME, + USERPROFILE: process.env.USERPROFILE, + HOMEDRIVE: process.env.HOMEDRIVE, + HOMEPATH: process.env.HOMEPATH, +}; + +async function callHooks( + initialPath: string, + method: string, + pathname: string, + body?: any, +): Promise<{ handled: boolean; status: number; json: any }> { + const url = new URL(pathname, 'http://localhost'); + let status = 0; + let text = ''; + + const res = { + writeHead(code: number) { + status = code; + }, + end(chunk?: any) { + text = chunk === undefined ? '' : String(chunk); + }, + }; + + const handlePostRequest = async (_req: any, _res: any, handler: (parsed: any) => Promise) => { + const result = await handler(body ?? {}); + if (result && typeof result === 'object' && typeof result.error === 'string' && result.error.length > 0) { + res.writeHead(typeof result.status === 'number' ? 
result.status : 500); + res.end(JSON.stringify({ error: result.error })); + return; + } + res.writeHead(200); + res.end(JSON.stringify(result)); + }; + + const handled = await mod.handleHooksRoutes({ + pathname: url.pathname, + url, + req: { method }, + res, + initialPath, + handlePostRequest, + broadcastToClients() {}, + extractSessionIdFromPath() { + return null; + }, + }); + + return { handled, status, json: text ? JSON.parse(text) : null }; +} + +describe('hooks routes integration', async () => { + let homeDir = ''; + let projectRoot = ''; + + before(async () => { + homeDir = mkdtempSync(join(tmpdir(), 'ccw-hooks-home-')); + projectRoot = mkdtempSync(join(tmpdir(), 'ccw-hooks-project-')); + + process.env.HOME = homeDir; + process.env.USERPROFILE = homeDir; + process.env.HOMEDRIVE = undefined; + process.env.HOMEPATH = undefined; + + mock.method(console, 'log', () => {}); + mock.method(console, 'warn', () => {}); + mock.method(console, 'error', () => {}); + + mod = await import(hooksRoutesUrl.href); + }); + + beforeEach(() => { + rmSync(join(homeDir, '.claude'), { recursive: true, force: true }); + rmSync(join(projectRoot, '.claude'), { recursive: true, force: true }); + }); + + after(() => { + mock.restoreAll(); + process.env.HOME = originalEnv.HOME; + process.env.USERPROFILE = originalEnv.USERPROFILE; + process.env.HOMEDRIVE = originalEnv.HOMEDRIVE; + process.env.HOMEPATH = originalEnv.HOMEPATH; + + rmSync(projectRoot, { recursive: true, force: true }); + rmSync(homeDir, { recursive: true, force: true }); + }); + + it('GET /api/hooks returns global and project hook configs', async () => { + const res = await callHooks(projectRoot, 'GET', '/api/hooks'); + assert.equal(res.handled, true); + assert.equal(res.status, 200); + assert.ok(res.json); + assert.ok(res.json.global); + assert.ok(res.json.project); + assert.deepEqual(res.json.global.hooks, {}); + assert.deepEqual(res.json.project.hooks, {}); + }); + + it('POST /api/hooks saves a global hook and GET reflects 
it', async () => { + const save = await callHooks(projectRoot, 'POST', '/api/hooks', { + scope: 'global', + event: 'PreToolUse', + hookData: { command: 'echo hi' }, + }); + assert.equal(save.handled, true); + assert.equal(save.status, 200); + assert.equal(save.json.success, true); + + const read = await callHooks(projectRoot, 'GET', '/api/hooks'); + assert.equal(read.status, 200); + assert.equal(Array.isArray(read.json.global.hooks.PreToolUse), true); + assert.equal(read.json.global.hooks.PreToolUse.length, 1); + assert.equal(read.json.global.hooks.PreToolUse[0].command, 'echo hi'); + }); + + it('DELETE /api/hooks removes a hook by index', async () => { + await callHooks(projectRoot, 'POST', '/api/hooks', { + scope: 'global', + event: 'PreToolUse', + hookData: { command: 'echo hi' }, + }); + + const del = await callHooks(projectRoot, 'DELETE', '/api/hooks', { + scope: 'global', + event: 'PreToolUse', + hookIndex: 0, + }); + assert.equal(del.status, 200); + assert.equal(del.json.success, true); + + const read = await callHooks(projectRoot, 'GET', '/api/hooks'); + assert.equal(read.status, 200); + assert.deepEqual(read.json.global.hooks, {}); + }); +}); + diff --git a/ccw/tests/integration/issue-routes.test.ts b/ccw/tests/integration/issue-routes.test.ts new file mode 100644 index 00000000..d6004fc0 --- /dev/null +++ b/ccw/tests/integration/issue-routes.test.ts @@ -0,0 +1,296 @@ +/** + * Integration tests for issue routes (issues + solutions + queue). + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Uses a temporary project root to isolate `.workflow/issues` JSONL storage. 
+ */ + +import { after, before, beforeEach, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import http from 'node:http'; +import { existsSync, mkdtempSync, readFileSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const issueRoutesUrl = new URL('../../dist/core/routes/issue-routes.js', import.meta.url); +issueRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +type JsonResponse = { status: number; json: any; text: string }; + +async function requestJson( + baseUrl: string, + method: string, + path: string, + body?: unknown, +): Promise { + const url = new URL(path, baseUrl); + const payload = body === undefined ? null : Buffer.from(JSON.stringify(body), 'utf8'); + + return new Promise((resolve, reject) => { + const req = http.request( + url, + { + method, + headers: { + Accept: 'application/json', + ...(payload + ? { 'Content-Type': 'application/json', 'Content-Length': String(payload.length) } + : {}), + }, + }, + (res) => { + let responseBody = ''; + res.on('data', (chunk) => { + responseBody += chunk.toString(); + }); + res.on('end', () => { + let json: any = null; + try { + json = responseBody ? JSON.parse(responseBody) : null; + } catch { + json = null; + } + resolve({ status: res.statusCode || 0, json, text: responseBody }); + }); + }, + ); + req.on('error', reject); + if (payload) req.write(payload); + req.end(); + }); +} + +function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: any) => Promise): void { + let body = ''; + req.on('data', (chunk) => { + body += chunk.toString(); + }); + req.on('end', async () => { + try { + const parsed = body ? 
JSON.parse(body) : {}; + const result = await handler(parsed); + + if (result?.error) { + res.writeHead(result.status || 500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: result.error })); + } else { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(result)); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); +} + +function readJsonl(path: string): any[] { + if (!existsSync(path)) return []; + return readFileSync(path, 'utf8') + .split('\n') + .filter((line) => line.trim().length > 0) + .map((line) => JSON.parse(line)); +} + +describe('issue routes integration', async () => { + let server: http.Server | null = null; + let baseUrl = ''; + let projectRoot = ''; + + before(async () => { + projectRoot = mkdtempSync(join(tmpdir(), 'ccw-issue-routes-project-')); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + mod = await import(issueRoutesUrl.href); + + server = http.createServer(async (req, res) => { + const url = new URL(req.url || '/', 'http://localhost'); + const pathname = url.pathname; + + const ctx = { + pathname, + url, + req, + res, + initialPath: projectRoot, + handlePostRequest, + }; + + try { + const handled = await mod.handleIssueRoutes(ctx); + if (!handled) { + res.writeHead(404, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Not Found' })); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); + + await new Promise((resolve) => server!.listen(0, () => resolve())); + const addr = server.address(); + const port = typeof addr === 'object' && addr ? 
addr.port : 0; + baseUrl = `http://127.0.0.1:${port}`; + }); + + beforeEach(() => { + rmSync(join(projectRoot, '.workflow'), { recursive: true, force: true }); + }); + + after(async () => { + mock.restoreAll(); + if (server) { + await new Promise((resolve) => server!.close(() => resolve())); + server = null; + } + if (projectRoot) { + rmSync(projectRoot, { recursive: true, force: true }); + projectRoot = ''; + } + }); + + it('GET /api/issues returns empty issues list with metadata', async () => { + const res = await requestJson(baseUrl, 'GET', '/api/issues'); + assert.equal(res.status, 200); + assert.ok(res.json); + assert.equal(Array.isArray(res.json.issues), true); + assert.equal(res.json.issues.length, 0); + assert.equal(res.json._metadata.storage, 'jsonl'); + }); + + it('POST /api/issues creates a new issue and writes JSONL', async () => { + const issueId = 'ISS-IR-1'; + const res = await requestJson(baseUrl, 'POST', '/api/issues', { id: issueId, title: 'Issue routes test' }); + assert.equal(res.status, 200); + assert.equal(res.json?.success, true); + assert.equal(res.json.issue.id, issueId); + + const issuesPath = join(projectRoot, '.workflow', 'issues', 'issues.jsonl'); + const lines = readJsonl(issuesPath); + assert.equal(lines.length, 1); + assert.equal(lines[0].id, issueId); + assert.equal(typeof lines[0].created_at, 'string'); + }); + + it('GET /api/issues returns enriched issue list with counts', async () => { + const issueId = 'ISS-IR-2'; + await requestJson(baseUrl, 'POST', '/api/issues', { id: issueId, title: 'Counts' }); + + const res = await requestJson(baseUrl, 'GET', '/api/issues'); + assert.equal(res.status, 200); + const issue = res.json.issues.find((i: any) => i.id === issueId); + assert.ok(issue); + assert.equal(issue.solution_count, 0); + assert.equal(issue.task_count, 0); + }); + + it('GET /api/issues/:id returns issue detail with solutions/tasks arrays', async () => { + const issueId = 'ISS-IR-3'; + await requestJson(baseUrl, 'POST', 
'/api/issues', { id: issueId, title: 'Detail' }); + + const res = await requestJson(baseUrl, 'GET', `/api/issues/${encodeURIComponent(issueId)}`); + assert.equal(res.status, 200); + assert.equal(res.json.id, issueId); + assert.equal(Array.isArray(res.json.solutions), true); + assert.equal(Array.isArray(res.json.tasks), true); + assert.equal(res.json.solutions.length, 0); + assert.equal(res.json.tasks.length, 0); + }); + + it('POST /api/issues/:id/solutions appends a solution to solutions JSONL', async () => { + const issueId = 'ISS-IR-4'; + const solutionId = 'SOL-ISS-IR-4-1'; + await requestJson(baseUrl, 'POST', '/api/issues', { id: issueId, title: 'Solution add' }); + + const tasks = [{ id: 'T1', title: 'Do thing' }]; + const res = await requestJson(baseUrl, 'POST', `/api/issues/${encodeURIComponent(issueId)}/solutions`, { id: solutionId, tasks }); + assert.equal(res.status, 200); + assert.equal(res.json?.success, true); + assert.equal(res.json.solution.id, solutionId); + assert.equal(res.json.solution.is_bound, false); + + const solutionsPath = join(projectRoot, '.workflow', 'issues', 'solutions', `${issueId}.jsonl`); + const lines = readJsonl(solutionsPath); + assert.equal(lines.length, 1); + assert.equal(lines[0].id, solutionId); + assert.equal(Array.isArray(lines[0].tasks), true); + }); + + it('PATCH /api/issues/:id binds solution and updates planned status', async () => { + const issueId = 'ISS-IR-5'; + const solutionId = 'SOL-ISS-IR-5-1'; + await requestJson(baseUrl, 'POST', '/api/issues', { id: issueId, title: 'Bind' }); + await requestJson(baseUrl, 'POST', `/api/issues/${encodeURIComponent(issueId)}/solutions`, { id: solutionId, tasks: [{ id: 'T1' }] }); + + const res = await requestJson(baseUrl, 'PATCH', `/api/issues/${encodeURIComponent(issueId)}`, { bound_solution_id: solutionId }); + assert.equal(res.status, 200); + assert.equal(res.json?.success, true); + assert.ok(res.json.updated.includes('bound_solution_id')); + + const detail = await 
requestJson(baseUrl, 'GET', `/api/issues/${encodeURIComponent(issueId)}`); + assert.equal(detail.status, 200); + assert.equal(detail.json.bound_solution_id, solutionId); + assert.equal(detail.json.status, 'planned'); + assert.ok(detail.json.planned_at); + assert.equal(detail.json.tasks.length, 1); + + const solutionsPath = join(projectRoot, '.workflow', 'issues', 'solutions', `${issueId}.jsonl`); + const lines = readJsonl(solutionsPath); + assert.equal(lines.length, 1); + assert.equal(lines[0].is_bound, true); + }); + + it('PATCH /api/issues/:id/tasks/:taskId updates bound solution task fields', async () => { + const issueId = 'ISS-IR-6'; + const solutionId = 'SOL-ISS-IR-6-1'; + await requestJson(baseUrl, 'POST', '/api/issues', { id: issueId, title: 'Task update' }); + await requestJson(baseUrl, 'POST', `/api/issues/${encodeURIComponent(issueId)}/solutions`, { id: solutionId, tasks: [{ id: 'T1', status: 'pending' }] }); + await requestJson(baseUrl, 'PATCH', `/api/issues/${encodeURIComponent(issueId)}`, { bound_solution_id: solutionId }); + + const res = await requestJson(baseUrl, 'PATCH', `/api/issues/${encodeURIComponent(issueId)}/tasks/T1`, { status: 'completed', result: { ok: true } }); + assert.equal(res.status, 200); + assert.equal(res.json?.success, true); + assert.ok(res.json.updated.includes('status')); + assert.ok(res.json.updated.includes('result')); + + const solutionsPath = join(projectRoot, '.workflow', 'issues', 'solutions', `${issueId}.jsonl`); + const lines = readJsonl(solutionsPath); + const task = lines[0].tasks.find((t: any) => t.id === 'T1'); + assert.equal(task.status, 'completed'); + assert.deepEqual(task.result, { ok: true }); + assert.ok(task.updated_at); + }); + + it('DELETE /api/issues/:id removes issue and deletes solutions JSONL', async () => { + const issueId = 'ISS-IR-7'; + const solutionId = 'SOL-ISS-IR-7-1'; + await requestJson(baseUrl, 'POST', '/api/issues', { id: issueId, title: 'Delete me' }); + await requestJson(baseUrl, 'POST', 
`/api/issues/${encodeURIComponent(issueId)}/solutions`, { id: solutionId, tasks: [{ id: 'T1' }] }); + + const res = await requestJson(baseUrl, 'DELETE', `/api/issues/${encodeURIComponent(issueId)}`); + assert.equal(res.status, 200); + assert.equal(res.json?.success, true); + + const issuesPath = join(projectRoot, '.workflow', 'issues', 'issues.jsonl'); + assert.equal(readJsonl(issuesPath).length, 0); + + const solutionsPath = join(projectRoot, '.workflow', 'issues', 'solutions', `${issueId}.jsonl`); + assert.equal(existsSync(solutionsPath), false); + }); + + it('GET /api/queue returns grouped queue structure', async () => { + const res = await requestJson(baseUrl, 'GET', '/api/queue'); + assert.equal(res.status, 200); + assert.ok(res.json); + assert.equal(Array.isArray(res.json.execution_groups), true); + assert.equal(typeof res.json.grouped_items, 'object'); + }); +}); + diff --git a/ccw/tests/integration/litellm-api-routes.test.ts b/ccw/tests/integration/litellm-api-routes.test.ts new file mode 100644 index 00000000..55f081bb --- /dev/null +++ b/ccw/tests/integration/litellm-api-routes.test.ts @@ -0,0 +1,118 @@ +/** + * Integration tests for LiteLLM API routes (providers + model discovery). + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Calls route handler directly (no HTTP server required). + * - Uses temporary CCW_DATA_DIR to isolate ~/.ccw config writes. 
+ */ + +import { after, before, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdtempSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const CCW_HOME = mkdtempSync(join(tmpdir(), 'ccw-litellm-api-home-')); +const PROJECT_ROOT = mkdtempSync(join(tmpdir(), 'ccw-litellm-api-project-')); + +const litellmApiRoutesUrl = new URL('../../dist/core/routes/litellm-api-routes.js', import.meta.url); +litellmApiRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +const originalEnv = { CCW_DATA_DIR: process.env.CCW_DATA_DIR }; + +async function callLiteLLMApi( + initialPath: string, + method: string, + path: string, + body?: any, +): Promise<{ handled: boolean; status: number; json: any; broadcasts: any[] }> { + const url = new URL(path, 'http://localhost'); + let status = 0; + let text = ''; + const broadcasts: any[] = []; + + const res = { + writeHead(code: number) { + status = code; + }, + end(chunk?: any) { + text = chunk === undefined ? '' : String(chunk); + }, + }; + + const handlePostRequest = async (_req: any, _res: any, handler: (parsed: any) => Promise) => { + const result = await handler(body ?? {}); + const errorValue = result && typeof result === 'object' ? (result as any).error : undefined; + const statusValue = result && typeof result === 'object' ? (result as any).status : undefined; + + if (typeof errorValue === 'string' && errorValue.length > 0) { + res.writeHead(typeof statusValue === 'number' ? 
statusValue : 500); + res.end(JSON.stringify({ error: errorValue })); + return; + } + + res.writeHead(200); + res.end(JSON.stringify(result)); + }; + + const handled = await mod.handleLiteLLMApiRoutes({ + pathname: url.pathname, + url, + req: { method }, + res, + initialPath, + handlePostRequest, + broadcastToClients(data: unknown) { + broadcasts.push(data); + }, + }); + + return { handled, status, json: text ? JSON.parse(text) : null, broadcasts }; +} + +describe('litellm-api routes integration', async () => { + before(async () => { + process.env.CCW_DATA_DIR = CCW_HOME; + mock.method(console, 'log', () => {}); + mock.method(console, 'warn', () => {}); + mock.method(console, 'error', () => {}); + mod = await import(litellmApiRoutesUrl.href); + }); + + after(() => { + mock.restoreAll(); + process.env.CCW_DATA_DIR = originalEnv.CCW_DATA_DIR; + rmSync(CCW_HOME, { recursive: true, force: true }); + rmSync(PROJECT_ROOT, { recursive: true, force: true }); + }); + + it('GET /api/litellm-api/models/openai returns static model list', async () => { + const res = await callLiteLLMApi(PROJECT_ROOT, 'GET', '/api/litellm-api/models/openai'); + assert.equal(res.handled, true); + assert.equal(res.status, 200); + assert.equal(res.json.providerType, 'openai'); + assert.equal(Array.isArray(res.json.models), true); + assert.ok(res.json.models.length > 0); + }); + + it('GET /api/litellm-api/providers returns default empty config', async () => { + const res = await callLiteLLMApi(PROJECT_ROOT, 'GET', '/api/litellm-api/providers'); + assert.equal(res.handled, true); + assert.equal(res.status, 200); + assert.equal(Array.isArray(res.json.providers), true); + assert.equal(typeof res.json.count, 'number'); + }); + + it('POST /api/litellm-api/providers validates required fields', async () => { + const res = await callLiteLLMApi(PROJECT_ROOT, 'POST', '/api/litellm-api/providers', { name: 'x' }); + assert.equal(res.handled, true); + assert.equal(res.status, 400); + 
assert.ok(String(res.json.error).includes('required')); + assert.equal(res.broadcasts.length, 0); + }); +}); + diff --git a/ccw/tests/integration/nav-status-routes.test.ts b/ccw/tests/integration/nav-status-routes.test.ts new file mode 100644 index 00000000..b8b484dc --- /dev/null +++ b/ccw/tests/integration/nav-status-routes.test.ts @@ -0,0 +1,182 @@ +/** + * Integration tests for nav-status routes (badge count aggregation). + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Calls route handler directly (no HTTP server required). + * - Uses temporary HOME/USERPROFILE and project root to isolate filesystem reads. + */ + +import { after, before, beforeEach, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import { existsSync, mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const navStatusRoutesUrl = new URL('../../dist/core/routes/nav-status-routes.js', import.meta.url); +navStatusRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +const originalEnv = { + HOME: process.env.HOME, + USERPROFILE: process.env.USERPROFILE, + HOMEDRIVE: process.env.HOMEDRIVE, + HOMEPATH: process.env.HOMEPATH, +}; + +async function getNavStatus(projectRoot: string): Promise<{ status: number; json: any }> { + const url = new URL('/api/nav-status', 'http://localhost'); + let status = 0; + let body = ''; + + const res = { + writeHead(code: number) { + status = code; + }, + end(chunk?: any) { + body = chunk === undefined ? 
'' : String(chunk); + }, + }; + + const handled = await mod.handleNavStatusRoutes({ + pathname: '/api/nav-status', + url, + req: { method: 'GET' }, + res, + initialPath: projectRoot, + }); + + assert.equal(handled, true); + return { status, json: JSON.parse(body) }; +} + +describe('nav-status routes integration', async () => { + let homeDir = ''; + let projectRoot = ''; + + before(async () => { + homeDir = mkdtempSync(join(tmpdir(), 'ccw-nav-home-')); + projectRoot = mkdtempSync(join(tmpdir(), 'ccw-nav-project-')); + + process.env.HOME = homeDir; + process.env.USERPROFILE = homeDir; + process.env.HOMEDRIVE = undefined; + process.env.HOMEPATH = undefined; + + mock.method(console, 'error', () => {}); + mod = await import(navStatusRoutesUrl.href); + }); + + beforeEach(() => { + // Reset relevant trees per test. + rmSync(join(projectRoot, '.workflow'), { recursive: true, force: true }); + rmSync(join(projectRoot, '.claude'), { recursive: true, force: true }); + rmSync(join(homeDir, '.claude'), { recursive: true, force: true }); + + const rootClaude = join(projectRoot, 'CLAUDE.md'); + if (existsSync(rootClaude)) rmSync(rootClaude, { force: true }); + }); + + after(() => { + mock.restoreAll(); + process.env.HOME = originalEnv.HOME; + process.env.USERPROFILE = originalEnv.USERPROFILE; + process.env.HOMEDRIVE = originalEnv.HOMEDRIVE; + process.env.HOMEPATH = originalEnv.HOMEPATH; + + rmSync(projectRoot, { recursive: true, force: true }); + rmSync(homeDir, { recursive: true, force: true }); + }); + + it('returns zero counts when no data exists', async () => { + const res = await getNavStatus(projectRoot); + assert.equal(res.status, 200); + assert.ok(res.json); + + for (const key of ['issues', 'discoveries', 'skills', 'rules', 'claude', 'hooks', 'timestamp']) { + assert.ok(Object.prototype.hasOwnProperty.call(res.json, key), `missing key: ${key}`); + } + + assert.equal(res.json.issues.count, 0); + assert.equal(res.json.discoveries.count, 0); + 
assert.equal(res.json.skills.count, 0); + assert.equal(res.json.rules.count, 0); + assert.equal(res.json.claude.count, 0); + assert.equal(res.json.hooks.count, 0); + assert.equal(typeof res.json.timestamp, 'string'); + }); + + it('counts issues.jsonl lines and discovery index entries', async () => { + const issuesDir = join(projectRoot, '.workflow', 'issues'); + const discoveriesDir = join(issuesDir, 'discoveries'); + mkdirSync(discoveriesDir, { recursive: true }); + + writeFileSync(join(issuesDir, 'issues.jsonl'), '{"id":"ISS-1"}\n{"id":"ISS-2"}\n', 'utf8'); + writeFileSync(join(discoveriesDir, 'index.json'), JSON.stringify({ discoveries: [{ id: 'DSC-1' }, { id: 'DSC-2' }, { id: 'DSC-3' }] }), 'utf8'); + + const res = await getNavStatus(projectRoot); + assert.equal(res.status, 200); + assert.equal(res.json.issues.count, 2); + assert.equal(res.json.discoveries.count, 3); + }); + + it('aggregates skills, rules, CLAUDE.md files, and hooks across user/project', async () => { + // Skills + mkdirSync(join(projectRoot, '.claude', 'skills', 'proj-skill'), { recursive: true }); + writeFileSync(join(projectRoot, '.claude', 'skills', 'proj-skill', 'SKILL.md'), '# skill\n', 'utf8'); + mkdirSync(join(homeDir, '.claude', 'skills', 'user-skill-1'), { recursive: true }); + mkdirSync(join(homeDir, '.claude', 'skills', 'user-skill-2'), { recursive: true }); + writeFileSync(join(homeDir, '.claude', 'skills', 'user-skill-1', 'SKILL.md'), '# skill\n', 'utf8'); + writeFileSync(join(homeDir, '.claude', 'skills', 'user-skill-2', 'SKILL.md'), '# skill\n', 'utf8'); + + // Rules (recursive) + mkdirSync(join(projectRoot, '.claude', 'rules', 'nested'), { recursive: true }); + writeFileSync(join(projectRoot, '.claude', 'rules', 'a.md'), '# a\n', 'utf8'); + writeFileSync(join(projectRoot, '.claude', 'rules', 'nested', 'b.md'), '# b\n', 'utf8'); + mkdirSync(join(homeDir, '.claude', 'rules'), { recursive: true }); + writeFileSync(join(homeDir, '.claude', 'rules', 'c.md'), '# c\n', 'utf8'); + + // 
CLAUDE.md files (user main + project main + root + module) + mkdirSync(join(homeDir, '.claude'), { recursive: true }); + writeFileSync(join(homeDir, '.claude', 'CLAUDE.md'), '# user\n', 'utf8'); + mkdirSync(join(projectRoot, '.claude'), { recursive: true }); + writeFileSync(join(projectRoot, '.claude', 'CLAUDE.md'), '# project\n', 'utf8'); + writeFileSync(join(projectRoot, 'CLAUDE.md'), '# root\n', 'utf8'); + const moduleDir = join(projectRoot, 'module-a'); + mkdirSync(moduleDir, { recursive: true }); + writeFileSync(join(moduleDir, 'CLAUDE.md'), '# module\n', 'utf8'); + + // Hooks in settings.json + mkdirSync(join(homeDir, '.claude'), { recursive: true }); + writeFileSync( + join(homeDir, '.claude', 'settings.json'), + JSON.stringify({ hooks: { PreToolUse: [{}, {}], PostToolUse: {} } }), + 'utf8', + ); + writeFileSync( + join(projectRoot, '.claude', 'settings.json'), + JSON.stringify({ hooks: { PreToolUse: [{}] } }), + 'utf8', + ); + + const res = await getNavStatus(projectRoot); + assert.equal(res.status, 200); + + assert.equal(res.json.skills.project, 1); + assert.equal(res.json.skills.user, 2); + assert.equal(res.json.skills.count, 3); + + assert.equal(res.json.rules.project, 2); + assert.equal(res.json.rules.user, 1); + assert.equal(res.json.rules.count, 3); + + assert.equal(res.json.claude.count, 4); + + assert.equal(res.json.hooks.global, 3); + assert.equal(res.json.hooks.project, 1); + assert.equal(res.json.hooks.count, 4); + }); +}); + diff --git a/ccw/tests/integration/rules-routes.test.ts b/ccw/tests/integration/rules-routes.test.ts new file mode 100644 index 00000000..f2498ddf --- /dev/null +++ b/ccw/tests/integration/rules-routes.test.ts @@ -0,0 +1,153 @@ +/** + * Integration tests for rules routes (rules management CRUD). + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Calls route handler directly (no HTTP server required). + * - Uses temporary HOME/USERPROFILE to isolate user rules directory. 
+ */ + +import { after, before, beforeEach, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import { existsSync, mkdtempSync, readFileSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const rulesRoutesUrl = new URL('../../dist/core/routes/rules-routes.js', import.meta.url); +rulesRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +const originalEnv = { + HOME: process.env.HOME, + USERPROFILE: process.env.USERPROFILE, + HOMEDRIVE: process.env.HOMEDRIVE, + HOMEPATH: process.env.HOMEPATH, +}; + +async function callRules( + initialPath: string, + method: string, + path: string, + body?: any, +): Promise<{ handled: boolean; status: number; json: any }> { + const url = new URL(path, 'http://localhost'); + let status = 0; + let text = ''; + let postPromise: Promise | null = null; + + const res = { + writeHead(code: number) { + status = code; + }, + end(chunk?: any) { + text = chunk === undefined ? '' : String(chunk); + }, + }; + + const handlePostRequest = (_req: any, _res: any, handler: (parsed: any) => Promise) => { + postPromise = (async () => { + const result = await handler(body ?? {}); + const errorValue = result && typeof result === 'object' ? (result as any).error : undefined; + const statusValue = result && typeof result === 'object' ? (result as any).status : undefined; + + if (typeof errorValue === 'string' && errorValue.length > 0) { + res.writeHead(typeof statusValue === 'number' ? statusValue : 500); + res.end(JSON.stringify({ error: errorValue })); + return; + } + + res.writeHead(200); + res.end(JSON.stringify(result)); + })(); + }; + + const handled = await mod.handleRulesRoutes({ + pathname: url.pathname, + url, + req: { method }, + res, + initialPath, + handlePostRequest, + }); + + if (postPromise) await postPromise; + + return { handled, status, json: text ? 
JSON.parse(text) : null }; +} + +describe('rules routes integration', async () => { + let homeDir = ''; + let projectRoot = ''; + + before(async () => { + homeDir = mkdtempSync(join(tmpdir(), 'ccw-rules-home-')); + projectRoot = mkdtempSync(join(tmpdir(), 'ccw-rules-project-')); + + process.env.HOME = homeDir; + process.env.USERPROFILE = homeDir; + process.env.HOMEDRIVE = undefined; + process.env.HOMEPATH = undefined; + + mock.method(console, 'log', () => {}); + mock.method(console, 'warn', () => {}); + mock.method(console, 'error', () => {}); + + mod = await import(rulesRoutesUrl.href); + }); + + beforeEach(() => { + rmSync(join(homeDir, '.claude'), { recursive: true, force: true }); + rmSync(join(projectRoot, '.claude'), { recursive: true, force: true }); + }); + + after(() => { + mock.restoreAll(); + process.env.HOME = originalEnv.HOME; + process.env.USERPROFILE = originalEnv.USERPROFILE; + process.env.HOMEDRIVE = originalEnv.HOMEDRIVE; + process.env.HOMEPATH = originalEnv.HOMEPATH; + + rmSync(projectRoot, { recursive: true, force: true }); + rmSync(homeDir, { recursive: true, force: true }); + }); + + it('GET /api/rules returns projectRules and userRules arrays', async () => { + const res = await callRules(projectRoot, 'GET', '/api/rules'); + assert.equal(res.handled, true); + assert.equal(res.status, 200); + assert.equal(Array.isArray(res.json.projectRules), true); + assert.equal(Array.isArray(res.json.userRules), true); + }); + + it('POST /api/rules/create writes a project rule and GET reflects it', async () => { + const create = await callRules(projectRoot, 'POST', '/api/rules/create', { + fileName: 'test-rule.md', + content: '# Hello rule\n', + paths: ['src/**'], + location: 'project', + }); + + assert.equal(create.handled, true); + assert.equal(create.status, 200); + assert.equal(create.json.success, true); + assert.ok(typeof create.json.path === 'string' && create.json.path.length > 0); + assert.equal(existsSync(create.json.path), true); + + const config 
= await callRules(projectRoot, 'GET', '/api/rules'); + assert.equal(config.status, 200); + assert.equal(config.json.projectRules.length, 1); + assert.equal(config.json.projectRules[0].name, 'test-rule.md'); + + const detail = await callRules(projectRoot, 'GET', '/api/rules/test-rule.md?location=project'); + assert.equal(detail.status, 200); + assert.equal(detail.json.rule.name, 'test-rule.md'); + assert.ok(String(detail.json.rule.content).includes('Hello rule')); + + // Ensure frontmatter was persisted. + const raw = readFileSync(create.json.path, 'utf8'); + assert.ok(raw.startsWith('---')); + assert.ok(raw.includes('paths: [src/**]')); + }); +}); diff --git a/ccw/tests/integration/skills-routes.test.ts b/ccw/tests/integration/skills-routes.test.ts new file mode 100644 index 00000000..93e56ec7 --- /dev/null +++ b/ccw/tests/integration/skills-routes.test.ts @@ -0,0 +1,140 @@ +/** + * Integration tests for skills routes (skills listing + details). + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Calls route handler directly (no HTTP server required). + * - Uses temporary HOME/USERPROFILE to isolate user skills directory. 
+ */ + +import { after, before, beforeEach, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const skillsRoutesUrl = new URL('../../dist/core/routes/skills-routes.js', import.meta.url); +skillsRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +const originalEnv = { + HOME: process.env.HOME, + USERPROFILE: process.env.USERPROFILE, + HOMEDRIVE: process.env.HOMEDRIVE, + HOMEPATH: process.env.HOMEPATH, +}; + +async function callSkills( + initialPath: string, + method: string, + path: string, +): Promise<{ handled: boolean; status: number; json: any }> { + const url = new URL(path, 'http://localhost'); + let status = 0; + let body = ''; + + const res = { + writeHead(code: number) { + status = code; + }, + end(chunk?: any) { + body = chunk === undefined ? '' : String(chunk); + }, + }; + + const handled = await mod.handleSkillsRoutes({ + pathname: url.pathname, + url, + req: { method }, + res, + initialPath, + handlePostRequest() { + throw new Error('handlePostRequest should not be called for these tests'); + }, + }); + + return { handled, status, json: body ? 
JSON.parse(body) : null }; +} + +describe('skills routes integration', async () => { + let homeDir = ''; + let projectRoot = ''; + + before(async () => { + homeDir = mkdtempSync(join(tmpdir(), 'ccw-skills-home-')); + projectRoot = mkdtempSync(join(tmpdir(), 'ccw-skills-project-')); + + process.env.HOME = homeDir; + process.env.USERPROFILE = homeDir; + process.env.HOMEDRIVE = undefined; + process.env.HOMEPATH = undefined; + + mock.method(console, 'error', () => {}); + mod = await import(skillsRoutesUrl.href); + }); + + beforeEach(() => { + rmSync(join(homeDir, '.claude'), { recursive: true, force: true }); + rmSync(join(projectRoot, '.claude'), { recursive: true, force: true }); + + const skillDir = join(projectRoot, '.claude', 'skills', 'test-skill'); + mkdirSync(skillDir, { recursive: true }); + writeFileSync( + join(skillDir, 'SKILL.md'), + `--- +name: "Test Skill" +description: "A test skill" +version: "1.0.0" +allowed-tools: [ccw issue next] +--- + +# Test +`, + 'utf8', + ); + writeFileSync(join(skillDir, 'extra.txt'), 'extra', 'utf8'); + }); + + after(() => { + mock.restoreAll(); + process.env.HOME = originalEnv.HOME; + process.env.USERPROFILE = originalEnv.USERPROFILE; + process.env.HOMEDRIVE = originalEnv.HOMEDRIVE; + process.env.HOMEPATH = originalEnv.HOMEPATH; + + rmSync(projectRoot, { recursive: true, force: true }); + rmSync(homeDir, { recursive: true, force: true }); + }); + + it('GET /api/skills lists projectSkills and userSkills', async () => { + const res = await callSkills(projectRoot, 'GET', `/api/skills?path=${encodeURIComponent(projectRoot)}`); + assert.equal(res.handled, true); + assert.equal(res.status, 200); + assert.equal(Array.isArray(res.json.projectSkills), true); + assert.equal(Array.isArray(res.json.userSkills), true); + assert.equal(res.json.projectSkills.length, 1); + assert.equal(res.json.projectSkills[0].folderName, 'test-skill'); + assert.equal(res.json.projectSkills[0].name, 'Test Skill'); + 
assert.ok(res.json.projectSkills[0].supportingFiles.includes('extra.txt')); + }); + + it('GET /api/skills/:name returns skill detail with parsed content', async () => { + const res = await callSkills(projectRoot, 'GET', `/api/skills/test-skill?location=project&path=${encodeURIComponent(projectRoot)}`); + assert.equal(res.handled, true); + assert.equal(res.status, 200); + assert.equal(res.json.skill.folderName, 'test-skill'); + assert.equal(res.json.skill.name, 'Test Skill'); + assert.equal(Array.isArray(res.json.skill.allowedTools), true); + assert.ok(String(res.json.skill.content).includes('# Test')); + }); + + it('returns 404 when skill is missing', async () => { + const res = await callSkills(projectRoot, 'GET', `/api/skills/nope?location=project&path=${encodeURIComponent(projectRoot)}`); + assert.equal(res.handled, true); + assert.equal(res.status, 404); + assert.ok(res.json.error); + }); +}); + diff --git a/ccw/tests/issue-command.test.ts b/ccw/tests/issue-command.test.ts new file mode 100644 index 00000000..6d8e60f0 --- /dev/null +++ b/ccw/tests/issue-command.test.ts @@ -0,0 +1,1357 @@ +/** + * Unit tests for issue command module (ccw issue) + * + * Notes: + * - Targets the runtime implementation shipped in `ccw/dist`. + * - Uses isolated temp directories to avoid touching the real `.workflow/` tree. 
+ */ + +import { afterEach, beforeEach, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import { existsSync, mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join, resolve } from 'node:path'; +import inquirer from 'inquirer'; + +const issueCommandUrl = new URL('../dist/commands/issue.js', import.meta.url).href; + +interface TestIssuesEnv { + projectDir: string; + workflowDir: string; + issuesDir: string; + solutionsDir: string; + queuesDir: string; +} + +const ORIGINAL_CWD = process.cwd(); + +function setupTestIssuesDir(): TestIssuesEnv { + const projectDir = mkdtempSync(join(tmpdir(), 'ccw-issue-cmd-')); + const workflowDir = join(projectDir, '.workflow'); + const issuesDir = join(workflowDir, 'issues'); + const solutionsDir = join(issuesDir, 'solutions'); + const queuesDir = join(issuesDir, 'queues'); + + mkdirSync(solutionsDir, { recursive: true }); + mkdirSync(queuesDir, { recursive: true }); + + process.chdir(projectDir); + + return { projectDir, workflowDir, issuesDir, solutionsDir, queuesDir }; +} + +function cleanupTestIssuesDir(env: TestIssuesEnv): void { + process.chdir(ORIGINAL_CWD); + rmSync(env.projectDir, { recursive: true, force: true }); +} + +type MockIssue = { + id: string; + title: string; + status: string; + priority: number; + context: string; + bound_solution_id: string | null; + created_at: string; + updated_at: string; +}; + +type MockSolution = { + id: string; + tasks: unknown[]; + is_bound: boolean; + created_at: string; + bound_at?: string; + description?: string; + approach?: string; + exploration_context?: Record<string, unknown>; + analysis?: { risk?: string; impact?: string; complexity?: string }; + score?: number; +}; + +function createMockIssue(overrides: Partial<MockIssue> = {}): MockIssue { + const now = new Date().toISOString(); + return { + id: overrides.id ?? 'ISS-TEST-001', + title: overrides.title ?? 'Test issue', + status: overrides.status ?? 
'registered', + priority: overrides.priority ?? 3, + context: overrides.context ?? 'Test context', + bound_solution_id: overrides.bound_solution_id ?? null, + created_at: overrides.created_at ?? now, + updated_at: overrides.updated_at ?? now, + ...overrides, + }; +} + +function createMockSolution(overrides: Partial<MockSolution> = {}): MockSolution { + const now = new Date().toISOString(); + return { + id: overrides.id ?? 'SOL-ISS-TEST-001-1', + tasks: overrides.tasks ?? [], + is_bound: overrides.is_bound ?? false, + created_at: overrides.created_at ?? now, + bound_at: overrides.bound_at, + description: overrides.description, + approach: overrides.approach, + exploration_context: overrides.exploration_context, + analysis: overrides.analysis, + score: overrides.score, + ...overrides, + }; +} + +function readJsonl(path: string): any[] { + if (!existsSync(path)) return []; + return readFileSync(path, 'utf8') + .split('\n') + .filter((line) => line.trim().length > 0) + .map((line) => JSON.parse(line)); +} + +class ExitError extends Error { + code?: number; + + constructor(code?: number) { + super(`process.exit(${code ?? 
'undefined'})`); + this.code = code; + } +} + +async function expectProcessExit(fn: () => Promise<unknown>, code = 1): Promise<void> { + mock.method(process as any, 'exit', (exitCode?: number) => { + throw new ExitError(exitCode); + }); + + await assert.rejects( + fn(), + (err: any) => err instanceof ExitError && err.code === code, + ); +} + +describe('issue command module', async () => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let issueModule: any; + let env: TestIssuesEnv | null = null; + + beforeEach(() => { + mock.restoreAll(); + env = setupTestIssuesDir(); + }); + + afterEach(() => { + if (env) cleanupTestIssuesDir(env); + env = null; + mock.restoreAll(); + }); + + it('setup/teardown creates isolated temp directories', () => { + assert.ok(env); + assert.ok(existsSync(env.workflowDir)); + assert.ok(existsSync(env.issuesDir)); + assert.ok(existsSync(env.solutionsDir)); + assert.ok(existsSync(env.queuesDir)); + assert.ok(resolve(process.cwd()).startsWith(resolve(env.projectDir))); + }); + + it('mock generators produce schema-shaped objects', () => { + const issue = createMockIssue(); + assert.equal(typeof issue.id, 'string'); + assert.equal(typeof issue.title, 'string'); + assert.equal(typeof issue.status, 'string'); + assert.equal(typeof issue.priority, 'number'); + assert.equal(typeof issue.context, 'string'); + assert.ok(issue.created_at); + assert.ok(issue.updated_at); + assert.equal(issue.bound_solution_id, null); + + const solution = createMockSolution(); + assert.equal(typeof solution.id, 'string'); + assert.ok(Array.isArray(solution.tasks)); + assert.equal(typeof solution.is_bound, 'boolean'); + assert.ok(solution.created_at); + }); + + it('writes issue data under the temp .workflow directory', async () => { + issueModule ??= await import(issueCommandUrl); + + assert.ok(env); + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + issueModule.writeIssues([createMockIssue({ id: 'ISS-TEST-WRITE' })]); + const 
issuesJsonlPath = join(env.issuesDir, 'issues.jsonl'); + assert.ok(existsSync(issuesJsonlPath)); + assert.match(readFileSync(issuesJsonlPath, 'utf8'), /ISS-TEST-WRITE/); + }); + + describe('JSONL Operations', () => { + it('readIssues returns [] when issues.jsonl is missing', async () => { + issueModule ??= await import(issueCommandUrl); + assert.deepEqual(issueModule.readIssues(), []); + }); + + it('writeIssues writes newline-delimited JSON with trailing newline', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + issueModule.writeIssues([ + createMockIssue({ id: 'ISS-JSONL-1' }), + createMockIssue({ id: 'ISS-JSONL-2' }), + ]); + + const issuesJsonlPath = join(env.issuesDir, 'issues.jsonl'); + const content = readFileSync(issuesJsonlPath, 'utf8'); + assert.ok(content.endsWith('\n')); + + const lines = content.split('\n').filter((line) => line.trim().length > 0); + assert.equal(lines.length, 2); + assert.deepEqual(lines.map((l) => JSON.parse(l).id), ['ISS-JSONL-1', 'ISS-JSONL-2']); + assert.deepEqual(issueModule.readIssues().map((i: any) => i.id), ['ISS-JSONL-1', 'ISS-JSONL-2']); + }); + + it('readIssues returns [] for corrupted JSONL', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + writeFileSync(join(env.issuesDir, 'issues.jsonl'), '{bad json}\n', 'utf8'); + assert.deepEqual(issueModule.readIssues(), []); + }); + + it('readIssues returns [] when issues.jsonl is a directory', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mkdirSync(join(env.issuesDir, 'issues.jsonl'), { recursive: true }); + assert.deepEqual(issueModule.readIssues(), []); + }); + + it('writeIssues throws when issues.jsonl is a directory', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mkdirSync(join(env.issuesDir, 'issues.jsonl'), { recursive: true }); + assert.throws(() => issueModule.writeIssues([createMockIssue({ id: 'ISS-WRITE-ERR' })])); + }); + + 
it('readSolutions returns [] when solution JSONL is missing', async () => { + issueModule ??= await import(issueCommandUrl); + assert.deepEqual(issueModule.readSolutions('ISS-NO-SOL'), []); + }); + + it('writeSolutions writes newline-delimited JSON with trailing newline', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + issueModule.writeSolutions('ISS-SOL-1', [ + createMockSolution({ id: 'SOL-ISS-SOL-1-1' }), + createMockSolution({ id: 'SOL-ISS-SOL-1-2' }), + ]); + + const solutionsPath = join(env.solutionsDir, 'ISS-SOL-1.jsonl'); + const content = readFileSync(solutionsPath, 'utf8'); + assert.ok(content.endsWith('\n')); + + const lines = content.split('\n').filter((line) => line.trim().length > 0); + assert.equal(lines.length, 2); + assert.deepEqual(lines.map((l) => JSON.parse(l).id), ['SOL-ISS-SOL-1-1', 'SOL-ISS-SOL-1-2']); + assert.deepEqual(issueModule.readSolutions('ISS-SOL-1').map((s: any) => s.id), ['SOL-ISS-SOL-1-1', 'SOL-ISS-SOL-1-2']); + }); + + it('writeSolutions overwrites with full list (append via read->push->write)', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + issueModule.writeSolutions('ISS-SOL-APPEND', [createMockSolution({ id: 'SOL-ISS-SOL-APPEND-1' })]); + issueModule.writeSolutions('ISS-SOL-APPEND', [ + createMockSolution({ id: 'SOL-ISS-SOL-APPEND-1' }), + createMockSolution({ id: 'SOL-ISS-SOL-APPEND-2' }), + ]); + + const ids = issueModule.readSolutions('ISS-SOL-APPEND').map((s: any) => s.id); + assert.deepEqual(ids, ['SOL-ISS-SOL-APPEND-1', 'SOL-ISS-SOL-APPEND-2']); + }); + + it('readSolutions returns [] for corrupted JSONL', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + writeFileSync(join(env.solutionsDir, 'ISS-SOL-BAD.jsonl'), '{bad json}\n', 'utf8'); + assert.deepEqual(issueModule.readSolutions('ISS-SOL-BAD'), []); + }); + + it('writeSolutions throws when target path is a directory', async () => { + issueModule ??= await 
import(issueCommandUrl); + assert.ok(env); + + mkdirSync(join(env.solutionsDir, 'ISS-SOL-DIR.jsonl'), { recursive: true }); + assert.throws(() => issueModule.writeSolutions('ISS-SOL-DIR', [createMockSolution({ id: 'SOL-X' })])); + }); + }); + + describe('Issue Lifecycle', () => { + it('transitions registered → planning → planned → queued → executing → completed', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + mock.method(console, 'warn', () => {}); + + const issueId = 'ISS-LC-1'; + const solutionId = 'SOL-ISS-LC-1-1'; + + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'registered' })]); + issueModule.writeSolutions(issueId, [createMockSolution({ id: solutionId, is_bound: false })]); + + await issueModule.issueCommand('update', [issueId], { status: 'planning' }); + assert.equal(issueModule.readIssues().find((i: any) => i.id === issueId)?.status, 'planning'); + + await issueModule.issueCommand('bind', [issueId, solutionId], {}); + const planned = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(planned?.status, 'planned'); + assert.equal(planned?.bound_solution_id, solutionId); + assert.ok(planned?.planned_at); + + await issueModule.issueCommand('queue', ['add', issueId], {}); + const queued = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(queued?.status, 'queued'); + assert.ok(queued?.queued_at); + + await issueModule.issueCommand('next', [], {}); + const executing = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(executing?.status, 'executing'); + + const queue = issueModule.readQueue(); + assert.ok(queue); + const itemId = (queue.solutions || queue.tasks || [])[0]?.item_id; + assert.equal(itemId, 'S-1'); + + await issueModule.issueCommand('done', [itemId], {}); + + // Completed issues are auto-moved to history. 
+ assert.equal(issueModule.readIssues().some((i: any) => i.id === issueId), false); + const history = readJsonl(join(env.issuesDir, 'issue-history.jsonl')); + const completed = history.find((i: any) => i.id === issueId); + assert.equal(completed?.status, 'completed'); + assert.ok(completed?.completed_at); + }); + + it('transitions executing → failed when done is called with --fail', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + mock.method(console, 'warn', () => {}); + + const issueId = 'ISS-LC-FAIL'; + const solutionId = 'SOL-ISS-LC-FAIL-1'; + + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'registered' })]); + issueModule.writeSolutions(issueId, [createMockSolution({ id: solutionId, is_bound: true })]); + + // Directly queue (already bound) + await issueModule.issueCommand('queue', ['add', issueId], {}); + await issueModule.issueCommand('next', [], {}); + + const queue = issueModule.readQueue(); + assert.ok(queue); + const itemId = (queue.solutions || queue.tasks || [])[0]?.item_id; + + await issueModule.issueCommand('done', [itemId], { fail: true, reason: 'boom' }); + + const failed = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(failed?.status, 'failed'); + + const updatedQueue = issueModule.readQueue(queue.id); + const updatedItem = (updatedQueue?.solutions || updatedQueue?.tasks || []).find((i: any) => i.item_id === itemId); + assert.equal(updatedItem?.status, 'failed'); + assert.ok(updatedItem?.completed_at); + assert.equal(updatedItem?.failure_reason, 'boom'); + }); + + it('update sets planned_at when status is set to planned', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-UPD-PLANNED'; + const oldUpdatedAt = '2000-01-01T00:00:00.000Z'; + 
issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planning', updated_at: oldUpdatedAt })]); + + await issueModule.issueCommand('update', [issueId], { status: 'planned' }); + + const issue = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(issue?.status, 'planned'); + assert.ok(issue?.planned_at); + assert.notEqual(issue?.updated_at, oldUpdatedAt); + }); + + it('update sets queued_at when status is set to queued', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-UPD-QUEUED'; + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planned' })]); + + await issueModule.issueCommand('update', [issueId], { status: 'queued' }); + + const issue = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(issue?.status, 'queued'); + assert.ok(issue?.queued_at); + }); + + it('update rejects invalid status values', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-UPD-BAD'; + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'registered' })]); + + await expectProcessExit(() => issueModule.issueCommand('update', [issueId], { status: 'not-a-status' }), 1); + + const issue = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(issue?.status, 'registered'); + }); + + it('update to completed moves issue to history', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-UPD-COMPLETE'; + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'executing' })]); + + await issueModule.issueCommand('update', [issueId], { status: 'completed' }); + + 
assert.equal(issueModule.readIssues().some((i: any) => i.id === issueId), false); + const history = readJsonl(join(env.issuesDir, 'issue-history.jsonl')); + const completed = history.find((i: any) => i.id === issueId); + assert.equal(completed?.status, 'completed'); + assert.ok(completed?.completed_at); + }); + + it('queue add fails when issue has no bound solution', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-QUEUE-NO-SOL'; + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planned' })]); + + await expectProcessExit(() => issueModule.issueCommand('queue', ['add', issueId], {}), 1); + + const issue = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(issue?.status, 'planned'); + }); + + it('next returns empty when no active queues exist', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + const logs: string[] = []; + mock.method(console, 'log', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + mock.method(console, 'error', () => {}); + + await issueModule.issueCommand('next', [], {}); + + const payload = JSON.parse(logs.at(-1) || '{}'); + assert.equal(payload.status, 'empty'); + assert.match(payload.message, /No active queues/); + }); + }); + + describe('Solution Binding', () => { + it('binds a solution and marks the issue as planned', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-BIND-1'; + const solutionId = 'SOL-ISS-BIND-1-1'; + + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planning' })]); + issueModule.writeSolutions(issueId, [createMockSolution({ id: solutionId, is_bound: false })]); + + await issueModule.issueCommand('bind', [issueId, solutionId], {}); + + 
const issue = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(issue?.status, 'planned'); + assert.equal(issue?.bound_solution_id, solutionId); + assert.ok(issue?.planned_at); + + const solutions = issueModule.readSolutions(issueId); + assert.equal(solutions.length, 1); + assert.equal(solutions[0].id, solutionId); + assert.equal(solutions[0].is_bound, true); + assert.ok(solutions[0].bound_at); + }); + + it('binding a second solution unbinds the previous one', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-BIND-2'; + const sol1 = 'SOL-ISS-BIND-2-1'; + const sol2 = 'SOL-ISS-BIND-2-2'; + + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planning' })]); + issueModule.writeSolutions(issueId, [ + createMockSolution({ id: sol1, is_bound: false }), + createMockSolution({ id: sol2, is_bound: false }), + ]); + + await issueModule.issueCommand('bind', [issueId, sol1], {}); + await issueModule.issueCommand('bind', [issueId, sol2], {}); + + const issue = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(issue?.bound_solution_id, sol2); + assert.equal(issue?.status, 'planned'); + + const solutions = issueModule.readSolutions(issueId); + const bound = solutions.filter((s: any) => s.is_bound); + assert.equal(bound.length, 1); + assert.equal(bound[0].id, sol2); + assert.equal(solutions.find((s: any) => s.id === sol1)?.is_bound, false); + }); + + it('bind fails when the requested solution does not exist', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-BIND-ERR'; + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planning' })]); + issueModule.writeSolutions(issueId, [createMockSolution({ id: 'SOL-ISS-BIND-ERR-1', 
is_bound: false })]); + + await expectProcessExit(() => issueModule.issueCommand('bind', [issueId, 'SOL-NOT-FOUND'], {}), 1); + }); + + it('bind fails when issue does not exist', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + await expectProcessExit(() => issueModule.issueCommand('bind', ['ISS-NOT-FOUND', 'SOL-ISS-NOT-FOUND-1'], {}), 1); + }); + + it('bind lists available solutions when solution id is omitted', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + const logs: string[] = []; + mock.method(console, 'log', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-BIND-LIST'; + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planning' })]); + issueModule.writeSolutions(issueId, [ + createMockSolution({ id: 'SOL-ISS-BIND-LIST-1', is_bound: false }), + createMockSolution({ id: 'SOL-ISS-BIND-LIST-2', is_bound: false }), + ]); + + await issueModule.issueCommand('bind', [issueId], {}); + + const output = logs.join('\n'); + assert.match(output, new RegExp(`Solutions for ${issueId}`)); + assert.match(output, /SOL-ISS-BIND-LIST-1/); + assert.match(output, /SOL-ISS-BIND-LIST-2/); + + const issue = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(issue?.bound_solution_id, null); + assert.equal(issue?.status, 'planning'); + }); + + it('bind --solution registers and binds a solution file', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-BIND-FILE'; + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planning' })]); + + const solutionPath = join(env.projectDir, 'solution.json'); + writeFileSync(solutionPath, JSON.stringify({ 
description: 'From file', tasks: [{ id: 'T1' }] }), 'utf8'); + + await issueModule.issueCommand('bind', [issueId], { solution: solutionPath }); + + const issue = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(issue?.status, 'planned'); + assert.ok(issue?.bound_solution_id); + assert.match(issue.bound_solution_id, new RegExp(`^SOL-${issueId}-\\d+$`)); + + const solutions = issueModule.readSolutions(issueId); + assert.equal(solutions.length, 1); + assert.equal(solutions[0].id, issue.bound_solution_id); + assert.equal(solutions[0].is_bound, true); + assert.ok(solutions[0].bound_at); + assert.equal(Array.isArray(solutions[0].tasks), true); + assert.equal(solutions[0].tasks.length, 1); + }); + }); + + describe('Queue Formation', () => { + function makeSolutionWithFiles(id: string, files: string[], isBound = true): MockSolution { + return createMockSolution({ + id, + is_bound: isBound, + tasks: [ + { + id: 'T1', + modification_points: files.map((file) => ({ file, target: 'x', change: 'y' })), + }, + ], + }); + } + + it('creates an active queue with a solution-level item', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-QUEUE-1'; + const solutionId = 'SOL-ISS-QUEUE-1-1'; + const files = ['src/a.ts', 'src/b.ts']; + + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planned', bound_solution_id: solutionId })]); + issueModule.writeSolutions(issueId, [makeSolutionWithFiles(solutionId, files, true)]); + + await issueModule.issueCommand('queue', ['add', issueId], {}); + + const queue = issueModule.readQueue(); + assert.ok(queue); + assert.ok(typeof queue.id === 'string' && queue.id.startsWith('QUE-')); + assert.equal(queue.status, 'active'); + assert.ok(queue.issue_ids.includes(issueId)); + + const items = queue.solutions || []; + assert.equal(items.length, 1); + 
assert.equal(items[0].item_id, 'S-1'); + assert.equal(items[0].issue_id, issueId); + assert.equal(items[0].solution_id, solutionId); + assert.equal(items[0].status, 'pending'); + assert.equal(items[0].execution_order, 1); + assert.equal(items[0].execution_group, 'P1'); + assert.deepEqual(items[0].files_touched?.sort(), files.slice().sort()); + + const issue = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(issue?.status, 'queued'); + }); + + it('generates queue IDs in QUE-YYYYMMDDHHMMSS format', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-QUEUE-ID'; + const solutionId = 'SOL-ISS-QUEUE-ID-1'; + + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planned', bound_solution_id: solutionId })]); + issueModule.writeSolutions(issueId, [makeSolutionWithFiles(solutionId, ['src/a.ts'], true)]); + + await issueModule.issueCommand('queue', ['add', issueId], {}); + + const queue = issueModule.readQueue(); + assert.ok(queue); + assert.match(queue.id, /^QUE-\d{14}$/); + }); + + it('does not add duplicate solution items to the queue', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-QUEUE-DUPE'; + const solutionId = 'SOL-ISS-QUEUE-DUPE-1'; + + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planned', bound_solution_id: solutionId })]); + issueModule.writeSolutions(issueId, [makeSolutionWithFiles(solutionId, ['src/a.ts'], true)]); + + await issueModule.issueCommand('queue', ['add', issueId], {}); + await issueModule.issueCommand('queue', ['add', issueId], {}); + + const queue = issueModule.readQueue(); + assert.ok(queue); + const items = queue.solutions || []; + assert.equal(items.length, 1); + assert.equal(items[0].issue_id, issueId); + 
assert.equal(items[0].solution_id, solutionId); + }); + + it('deduplicates files_touched extracted from modification_points', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issueId = 'ISS-QUEUE-FILES'; + const solutionId = 'SOL-ISS-QUEUE-FILES-1'; + const files = ['src/dup.ts', 'src/dup.ts', 'src/other.ts', 'src/dup.ts']; + + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planned', bound_solution_id: solutionId })]); + issueModule.writeSolutions(issueId, [makeSolutionWithFiles(solutionId, files, true)]); + + await issueModule.issueCommand('queue', ['add', issueId], {}); + + const queue = issueModule.readQueue(); + assert.ok(queue); + const items = queue.solutions || []; + assert.equal(items.length, 1); + assert.deepEqual(items[0].files_touched?.sort(), ['src/dup.ts', 'src/other.ts']); + }); + + it('adds multiple issues to the same active queue with incrementing item IDs', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const issue1 = 'ISS-QUEUE-M-1'; + const issue2 = 'ISS-QUEUE-M-2'; + + issueModule.writeIssues([ + createMockIssue({ id: issue1, status: 'planned' }), + createMockIssue({ id: issue2, status: 'planned' }), + ]); + issueModule.writeSolutions(issue1, [makeSolutionWithFiles('SOL-ISS-QUEUE-M-1-1', ['src/one.ts'], true)]); + issueModule.writeSolutions(issue2, [makeSolutionWithFiles('SOL-ISS-QUEUE-M-2-1', ['src/two.ts'], true)]); + + await issueModule.issueCommand('queue', ['add', issue1], {}); + await issueModule.issueCommand('queue', ['add', issue2], {}); + + const queue = issueModule.readQueue(); + assert.ok(queue); + const items = queue.solutions || []; + assert.equal(items.length, 2); + assert.deepEqual(items.map((i: any) => i.item_id), ['S-1', 'S-2']); + assert.deepEqual(items.map((i: 
any) => i.execution_order), [1, 2]); + assert.ok(queue.issue_ids.includes(issue1)); + assert.ok(queue.issue_ids.includes(issue2)); + }); + + it('queue dag batches non-conflicting items together', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + const logs: string[] = []; + mock.method(console, 'log', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + mock.method(console, 'error', () => {}); + + const issue1 = 'ISS-DAG-1'; + const issue2 = 'ISS-DAG-2'; + issueModule.writeIssues([createMockIssue({ id: issue1 }), createMockIssue({ id: issue2 })]); + issueModule.writeSolutions(issue1, [makeSolutionWithFiles('SOL-ISS-DAG-1-1', ['src/a.ts'], true)]); + issueModule.writeSolutions(issue2, [makeSolutionWithFiles('SOL-ISS-DAG-2-1', ['src/b.ts'], true)]); + + await issueModule.issueCommand('queue', ['add', issue1], {}); + await issueModule.issueCommand('queue', ['add', issue2], {}); + + logs.length = 0; + await issueModule.issueCommand('queue', ['dag'], {}); + + const payload = JSON.parse(logs.at(-1) || '{}'); + assert.deepEqual(payload.parallel_batches, [['S-1', 'S-2']]); + assert.equal(payload._summary.batches_needed, 1); + }); + + it('queue dag separates conflicting items into multiple batches', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + const logs: string[] = []; + mock.method(console, 'log', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + mock.method(console, 'error', () => {}); + + const shared = 'src/shared.ts'; + const issue1 = 'ISS-DAG-C-1'; + const issue2 = 'ISS-DAG-C-2'; + issueModule.writeIssues([createMockIssue({ id: issue1 }), createMockIssue({ id: issue2 })]); + issueModule.writeSolutions(issue1, [makeSolutionWithFiles('SOL-ISS-DAG-C-1-1', [shared], true)]); + issueModule.writeSolutions(issue2, [makeSolutionWithFiles('SOL-ISS-DAG-C-2-1', [shared], true)]); + + await issueModule.issueCommand('queue', ['add', issue1], {}); + await 
issueModule.issueCommand('queue', ['add', issue2], {}); + + logs.length = 0; + await issueModule.issueCommand('queue', ['dag'], {}); + + const payload = JSON.parse(logs.at(-1) || '{}'); + assert.equal(payload.parallel_batches.length, 2); + assert.deepEqual(payload.parallel_batches[0], ['S-1']); + assert.deepEqual(payload.parallel_batches[1], ['S-2']); + }); + + it('queue dag builds edges for depends_on and marks blocked items', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + const logs: string[] = []; + mock.method(console, 'log', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + mock.method(console, 'error', () => {}); + + const queueId = 'QUE-20260107000000'; + issueModule.writeQueue({ + id: queueId, + status: 'active', + issue_ids: ['ISS-DEP'], + tasks: [], + solutions: [ + { + item_id: 'S-1', + issue_id: 'ISS-DEP', + solution_id: 'SOL-ISS-DEP-1', + status: 'pending', + execution_order: 1, + execution_group: 'P1', + depends_on: [], + semantic_priority: 0.5, + files_touched: ['src/a.ts'], + task_count: 1, + }, + { + item_id: 'S-2', + issue_id: 'ISS-DEP', + solution_id: 'SOL-ISS-DEP-2', + status: 'pending', + execution_order: 2, + execution_group: 'P1', + depends_on: ['S-1'], + semantic_priority: 0.5, + files_touched: ['src/b.ts'], + task_count: 1, + }, + ], + conflicts: [], + }); + + await issueModule.issueCommand('queue', ['dag', queueId], {}); + const payload = JSON.parse(logs.at(-1) || '{}'); + + assert.deepEqual(payload.edges, [{ from: 'S-1', to: 'S-2' }]); + const node1 = payload.nodes.find((n: any) => n.id === 'S-1'); + const node2 = payload.nodes.find((n: any) => n.id === 'S-2'); + assert.equal(node1.ready, true); + assert.equal(node2.ready, false); + assert.deepEqual(node2.blocked_by, ['S-1']); + assert.deepEqual(payload.parallel_batches, [['S-1']]); + }); + + it('prompts for confirmation before deleting a queue (and cancels safely)', async () => { + issueModule ??= await import(issueCommandUrl); + 
assert.ok(env); + + const logs: string[] = []; + mock.method(console, 'log', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + mock.method(console, 'error', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + + const queueId = 'QUE-DELETE-CANCEL'; + issueModule.writeQueue({ + id: queueId, + status: 'completed', + issue_ids: [], + tasks: [], + solutions: [], + conflicts: [], + }); + + const promptCalls: any[] = []; + mock.method(inquirer, 'prompt', async (questions: any) => { + promptCalls.push(questions); + return { proceed: false }; + }); + + await issueModule.issueCommand('queue', ['delete', queueId], {}); + + assert.equal(promptCalls.length, 1); + assert.equal(promptCalls[0][0].type, 'confirm'); + assert.equal(promptCalls[0][0].default, false); + assert.ok(promptCalls[0][0].message.includes(queueId)); + assert.ok(logs.some((l) => l.includes('Queue deletion cancelled'))); + assert.ok(existsSync(join(env.queuesDir, `${queueId}.json`))); + }); + + it('deletes a queue after interactive confirmation', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const queueId = 'QUE-DELETE-CONFIRM'; + issueModule.writeQueue({ + id: queueId, + status: 'completed', + issue_ids: [], + tasks: [], + solutions: [], + conflicts: [], + }); + + mock.method(inquirer, 'prompt', async () => ({ proceed: true })); + + await issueModule.issueCommand('queue', ['delete', queueId], {}); + + assert.equal(existsSync(join(env.queuesDir, `${queueId}.json`)), false); + }); + + it('bypasses confirmation prompt when --force is set for queue delete', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const queueId = 'QUE-DELETE-FORCE'; + issueModule.writeQueue({ + id: queueId, + status: 'completed', + issue_ids: [], + tasks: [], 
+ solutions: [], + conflicts: [], + }); + + mock.method(inquirer, 'prompt', async () => { + throw new Error('inquirer.prompt should not be called when --force is set'); + }); + + await issueModule.issueCommand('queue', ['delete', queueId], { force: true }); + + assert.equal(existsSync(join(env.queuesDir, `${queueId}.json`)), false); + }); + }); + + describe('Queue Execution', () => { + async function runNext(queueId: string): Promise { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + const logs: string[] = []; + mock.method(console, 'log', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + mock.method(console, 'error', () => {}); + mock.method(console, 'warn', () => {}); + + await issueModule.issueCommand('next', [], { queue: queueId }); + return JSON.parse(logs.at(-1) || '{}'); + } + + it('next respects dependencies and advances after done()', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + mock.method(console, 'warn', () => {}); + + const queueId = 'QUE-20260107010101'; + const issue1 = 'ISS-NEXT-1'; + const issue2 = 'ISS-NEXT-2'; + const sol1 = 'SOL-ISS-NEXT-1-1'; + const sol2 = 'SOL-ISS-NEXT-2-1'; + + issueModule.writeIssues([createMockIssue({ id: issue1, status: 'queued' }), createMockIssue({ id: issue2, status: 'queued' })]); + issueModule.writeSolutions(issue1, [createMockSolution({ id: sol1, is_bound: false })]); + issueModule.writeSolutions(issue2, [createMockSolution({ id: sol2, is_bound: false })]); + + issueModule.writeQueue({ + id: queueId, + status: 'active', + issue_ids: [issue1, issue2], + tasks: [], + solutions: [ + { + item_id: 'S-1', + issue_id: issue1, + solution_id: sol1, + status: 'pending', + execution_order: 1, + execution_group: 'P1', + depends_on: [], + semantic_priority: 0.5, + task_count: 1, + }, + { + item_id: 'S-2', + issue_id: issue2, + solution_id: sol2, + status: 
'pending', + execution_order: 2, + execution_group: 'P1', + depends_on: ['S-1'], + semantic_priority: 0.5, + task_count: 1, + }, + ], + conflicts: [], + }); + + mock.restoreAll(); + const first = await runNext(queueId); + assert.equal(first.item_id, 'S-1'); + + // Mark S-1 complete so S-2 becomes ready. + mock.restoreAll(); + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + await issueModule.issueCommand('done', ['S-1'], { queue: queueId }); + + mock.restoreAll(); + const second = await runNext(queueId); + assert.equal(second.item_id, 'S-2'); + }); + + it('next selects lowest execution_order among ready items', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + const queueId = 'QUE-20260107011111'; + const issue1 = 'ISS-ORDER-1'; + const issue2 = 'ISS-ORDER-2'; + const sol1 = 'SOL-ISS-ORDER-1-1'; + const sol2 = 'SOL-ISS-ORDER-2-1'; + + issueModule.writeIssues([createMockIssue({ id: issue1, status: 'queued' }), createMockIssue({ id: issue2, status: 'queued' })]); + issueModule.writeSolutions(issue1, [createMockSolution({ id: sol1, is_bound: false })]); + issueModule.writeSolutions(issue2, [createMockSolution({ id: sol2, is_bound: false })]); + + issueModule.writeQueue({ + id: queueId, + status: 'active', + issue_ids: [issue1, issue2], + tasks: [], + solutions: [ + { + item_id: 'S-1', + issue_id: issue1, + solution_id: sol1, + status: 'pending', + execution_order: 2, + execution_group: 'P1', + depends_on: [], + semantic_priority: 0.5, + task_count: 1, + }, + { + item_id: 'S-2', + issue_id: issue2, + solution_id: sol2, + status: 'pending', + execution_order: 1, + execution_group: 'P1', + depends_on: [], + semantic_priority: 0.5, + task_count: 1, + }, + ], + conflicts: [], + }); + + const next = await runNext(queueId); + assert.equal(next.item_id, 'S-2'); + }); + + it('next skips failed items when auto-selecting', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + 
const queueId = 'QUE-20260107020202'; + const issue1 = 'ISS-SKIP-1'; + const issue2 = 'ISS-SKIP-2'; + const sol1 = 'SOL-ISS-SKIP-1-1'; + const sol2 = 'SOL-ISS-SKIP-2-1'; + + issueModule.writeIssues([createMockIssue({ id: issue1, status: 'queued' }), createMockIssue({ id: issue2, status: 'queued' })]); + issueModule.writeSolutions(issue1, [createMockSolution({ id: sol1, is_bound: false })]); + issueModule.writeSolutions(issue2, [createMockSolution({ id: sol2, is_bound: false })]); + + issueModule.writeQueue({ + id: queueId, + status: 'active', + issue_ids: [issue1, issue2], + tasks: [], + solutions: [ + { + item_id: 'S-1', + issue_id: issue1, + solution_id: sol1, + status: 'failed', + execution_order: 1, + execution_group: 'P1', + depends_on: [], + semantic_priority: 0.5, + failure_reason: 'nope', + task_count: 1, + }, + { + item_id: 'S-2', + issue_id: issue2, + solution_id: sol2, + status: 'pending', + execution_order: 2, + execution_group: 'P1', + depends_on: [], + semantic_priority: 0.5, + task_count: 1, + }, + ], + conflicts: [], + }); + + const next = await runNext(queueId); + assert.equal(next.item_id, 'S-2'); + }); + + it('done stores parsed result JSON on the queue item', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + mock.method(console, 'warn', () => {}); + + const queueId = 'QUE-20260107022222'; + const issueId = 'ISS-DONE-RESULT'; + const solutionId = 'SOL-ISS-DONE-RESULT-1'; + + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'executing' })]); + issueModule.writeQueue({ + id: queueId, + status: 'active', + issue_ids: [issueId], + tasks: [], + solutions: [ + { + item_id: 'S-1', + issue_id: issueId, + solution_id: solutionId, + status: 'executing', + execution_order: 1, + execution_group: 'P1', + depends_on: [], + semantic_priority: 0.5, + started_at: new Date().toISOString(), + task_count: 1, + }, + ], + conflicts: [], 
+ }); + + await issueModule.issueCommand('done', ['S-1'], { queue: queueId, result: '{"ok":true,"n":1}' }); + + const updatedQueue = issueModule.readQueue(queueId); + assert.equal(updatedQueue?.status, 'completed'); + const item = (updatedQueue?.solutions || []).find((i: any) => i.item_id === 'S-1'); + assert.equal(item?.status, 'completed'); + assert.ok(item?.completed_at); + assert.deepEqual(item?.result, { ok: true, n: 1 }); + }); + + it('retry resets failed items to pending and clears failure fields', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + mock.method(console, 'warn', () => {}); + + const queueId = 'QUE-20260107030303'; + const issueId = 'ISS-RETRY-1'; + const solutionId = 'SOL-ISS-RETRY-1-1'; + + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'failed' })]); + issueModule.writeSolutions(issueId, [createMockSolution({ id: solutionId, is_bound: false })]); + + issueModule.writeQueue({ + id: queueId, + status: 'failed', + issue_ids: [issueId], + tasks: [], + solutions: [ + { + item_id: 'S-1', + issue_id: issueId, + solution_id: solutionId, + status: 'failed', + execution_order: 1, + execution_group: 'P1', + depends_on: [], + semantic_priority: 0.5, + failure_reason: 'boom', + failure_details: { error_type: 'test_failure', message: 'boom', timestamp: new Date().toISOString() }, + task_count: 1, + }, + ], + conflicts: [], + }); + + await issueModule.issueCommand('retry', [issueId], { queue: queueId }); + + const updatedQueue = issueModule.readQueue(queueId); + const item = (updatedQueue?.solutions || []).find((i: any) => i.item_id === 'S-1'); + assert.equal(updatedQueue?.status, 'active'); + assert.equal(item?.status, 'pending'); + assert.equal(item?.failure_reason, undefined); + assert.equal(item?.failure_details, undefined); + assert.equal(Array.isArray(item?.failure_history), true); + 
assert.equal(item.failure_history.length, 1); + + const issue = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(issue?.status, 'queued'); + }); + + it('update --from-queue syncs planned issues to queued', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + const logs: string[] = []; + mock.method(console, 'log', (...args: any[]) => { + logs.push(args.map(String).join(' ')); + }); + mock.method(console, 'error', () => {}); + + const queueId = 'QUE-20260107040404'; + const issueId = 'ISS-SYNC-1'; + const solutionId = 'SOL-ISS-SYNC-1-1'; + + issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planned', bound_solution_id: solutionId })]); + issueModule.writeSolutions(issueId, [createMockSolution({ id: solutionId, is_bound: true })]); + + issueModule.writeQueue({ + id: queueId, + status: 'active', + issue_ids: [issueId], + tasks: [], + solutions: [ + { + item_id: 'S-1', + issue_id: issueId, + solution_id: solutionId, + status: 'pending', + execution_order: 1, + execution_group: 'P1', + depends_on: [], + semantic_priority: 0.5, + task_count: 1, + }, + ], + conflicts: [], + }); + + await issueModule.issueCommand('update', [], { fromQueue: true, json: true }); + const payload = JSON.parse(logs.at(-1) || '{}'); + assert.equal(payload.success, true); + assert.deepEqual(payload.queued, [issueId]); + + const issue = issueModule.readIssues().find((i: any) => i.id === issueId); + assert.equal(issue?.status, 'queued'); + assert.ok(issue?.queued_at); + }); + + it('marks queue as completed when all items are completed', async () => { + issueModule ??= await import(issueCommandUrl); + assert.ok(env); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + const queueId = 'QUE-20260107050505'; + const issue1 = 'ISS-QDONE-1'; + const issue2 = 'ISS-QDONE-2'; + const sol1 = 'SOL-ISS-QDONE-1-1'; + const sol2 = 'SOL-ISS-QDONE-2-1'; + + issueModule.writeIssues([createMockIssue({ id: 
issue1, status: 'queued' }), createMockIssue({ id: issue2, status: 'queued' })]); + issueModule.writeSolutions(issue1, [createMockSolution({ id: sol1, is_bound: false })]); + issueModule.writeSolutions(issue2, [createMockSolution({ id: sol2, is_bound: false })]); + + issueModule.writeQueue({ + id: queueId, + status: 'active', + issue_ids: [issue1, issue2], + tasks: [], + solutions: [ + { + item_id: 'S-1', + issue_id: issue1, + solution_id: sol1, + status: 'pending', + execution_order: 1, + execution_group: 'P1', + depends_on: [], + semantic_priority: 0.5, + task_count: 1, + }, + { + item_id: 'S-2', + issue_id: issue2, + solution_id: sol2, + status: 'pending', + execution_order: 2, + execution_group: 'P1', + depends_on: [], + semantic_priority: 0.5, + task_count: 1, + }, + ], + conflicts: [], + }); + + // Complete both items. + await issueModule.issueCommand('next', [], { queue: queueId }); + await issueModule.issueCommand('done', ['S-1'], { queue: queueId }); + assert.equal(issueModule.readQueue(queueId)?.status, 'active'); + + await issueModule.issueCommand('next', [], { queue: queueId }); + await issueModule.issueCommand('done', ['S-2'], { queue: queueId }); + assert.equal(issueModule.readQueue(queueId)?.status, 'completed'); + }); + }); +}); diff --git a/ccw/tests/middleware.test.ts b/ccw/tests/middleware.test.ts new file mode 100644 index 00000000..875c0a04 --- /dev/null +++ b/ccw/tests/middleware.test.ts @@ -0,0 +1,119 @@ +/** + * Unit tests for auth middleware (ccw/dist/core/auth/middleware.js) + */ + +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; + +const middlewareUrl = new URL('../dist/core/auth/middleware.js', import.meta.url).href; +const tokenManagerUrl = new URL('../dist/core/auth/token-manager.js', import.meta.url).href; + +type MockResponse = { + status: number | null; + headers: Record; + body: string; + writeHead: (status: number, headers?: Record) => void; + setHeader: (name: string, value: string) => void; + 
end: (body?: string) => void; +}; + +function createMockRes(): MockResponse { + const headers: Record = {}; + const response: MockResponse = { + status: null, + headers, + body: '', + writeHead: (status: number, nextHeaders?: Record) => { + response.status = status; + if (nextHeaders) { + for (const [k, v] of Object.entries(nextHeaders)) { + headers[k.toLowerCase()] = v; + } + } + }, + setHeader: (name: string, value: string) => { + headers[name.toLowerCase()] = value; + }, + end: (body?: string) => { + response.body = body ? String(body) : ''; + }, + }; + return response; +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let middleware: any; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let tokenMod: any; + +describe('auth middleware', async () => { + middleware = await import(middlewareUrl); + tokenMod = await import(tokenManagerUrl); + + it('rejects requests without tokens', () => { + const tokenManager = new tokenMod.TokenManager(); + const secretKey = 'secret'; + + const req: any = { headers: {}, socket: { remoteAddress: '127.0.0.1' } }; + const res = createMockRes(); + + const ok = middleware.authMiddleware({ + pathname: '/api/health', + req, + res, + tokenManager, + secretKey, + unauthenticatedPaths: new Set(['/api/auth/token']), + }); + + assert.equal(ok, false); + assert.equal(res.status, 401); + assert.ok(res.body.includes('Unauthorized')); + }); + + it('accepts Authorization: Bearer tokens', () => { + const tokenManager = new tokenMod.TokenManager(); + const secretKey = 'secret'; + const { token } = tokenManager.generateToken(secretKey); + + const req: any = { headers: { authorization: `Bearer ${token}` }, socket: { remoteAddress: '127.0.0.1' } }; + const res = createMockRes(); + + const ok = middleware.authMiddleware({ + pathname: '/api/health', + req, + res, + tokenManager, + secretKey, + }); + + assert.equal(ok, true); + assert.equal(req.authenticated, true); + }); + + it('accepts auth_token cookies', () => { + 
const tokenManager = new tokenMod.TokenManager(); + const secretKey = 'secret'; + const { token } = tokenManager.generateToken(secretKey); + + const req: any = { headers: { cookie: `auth_token=${encodeURIComponent(token)}` }, socket: { remoteAddress: '127.0.0.1' } }; + const res = createMockRes(); + + const ok = middleware.authMiddleware({ + pathname: '/api/health', + req, + res, + tokenManager, + secretKey, + }); + + assert.equal(ok, true); + }); + + it('isLocalhostRequest detects loopback addresses', () => { + assert.equal(middleware.isLocalhostRequest({ socket: { remoteAddress: '127.0.0.1' } } as any), true); + assert.equal(middleware.isLocalhostRequest({ socket: { remoteAddress: '::1' } } as any), true); + assert.equal(middleware.isLocalhostRequest({ socket: { remoteAddress: '::ffff:127.0.0.1' } } as any), true); + assert.equal(middleware.isLocalhostRequest({ socket: { remoteAddress: '10.0.0.5' } } as any), false); + }); +}); diff --git a/ccw/tests/path-resolver.test.ts b/ccw/tests/path-resolver.test.ts index 8254d5ce..e8917b2a 100644 --- a/ccw/tests/path-resolver.test.ts +++ b/ccw/tests/path-resolver.test.ts @@ -177,6 +177,32 @@ describe('path-resolver utility module', async () => { assert.ok(res.error?.includes('Path must be within')); }); + it('validatePath blocks symlink escapes even when target path does not exist', () => { + const baseDir = 'C:\\allowed'; + const linkPath = 'C:\\allowed\\link'; + setExists(linkPath, true); + setDir(linkPath, true); + setRealpath(linkPath, 'C:\\secret'); + + const res = pathResolver.validatePath(path.join(linkPath, 'newfile.txt'), { baseDir }); + assert.equal(res.valid, false); + assert.equal(res.path, null); + assert.ok(res.error?.includes('Path must be within')); + }); + + it('validatePath allows symlinked parent directories that resolve within baseDir', () => { + const baseDir = 'C:\\allowed'; + const linkPath = 'C:\\allowed\\link'; + setExists(linkPath, true); + setDir(linkPath, true); + setRealpath(linkPath, 
'C:\\allowed\\real'); + + const res = pathResolver.validatePath(path.join(linkPath, 'newfile.txt'), { baseDir }); + assert.equal(res.valid, true); + assert.equal(res.path, path.join('C:\\allowed\\real', 'newfile.txt')); + assert.equal(res.error, null); + }); + it('validateOutputPath rejects directories and resolves relative output paths', () => { assert.equal(pathResolver.validateOutputPath('').valid, false); diff --git a/ccw/tests/security/command-injection.test.ts b/ccw/tests/security/command-injection.test.ts new file mode 100644 index 00000000..4263357d --- /dev/null +++ b/ccw/tests/security/command-injection.test.ts @@ -0,0 +1,243 @@ +/** + * Regression tests for command injection protections in cli-executor. + * + * Focus: ensure args are escaped on Windows when `shell: true` is required. + */ + +import { after, before, describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import { createRequire } from 'node:module'; +import { EventEmitter } from 'node:events'; +import { PassThrough } from 'node:stream'; +import { mkdtempSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const cliExecutorUrl = new URL('../../dist/tools/cli-executor.js', import.meta.url).href; +const historyStoreUrl = new URL('../../dist/tools/cli-history-store.js', import.meta.url).href; +const shellEscapeUrl = new URL('../../dist/utils/shell-escape.js', import.meta.url).href; + +describe('cli-executor: command injection regression', async () => { + const isWindows = process.platform === 'win32'; + + const require = createRequire(import.meta.url); + const childProcess = require('child_process'); + const originalSpawn = childProcess.spawn; + + const originalSetTimeout = globalThis.setTimeout; + + const spawnCalls: Array<{ command: string; args: string[]; options: Record }> = []; + + const envSnapshot: Record = {}; + let ccwHome = ''; + let projectDir = ''; + + // eslint-disable-next-line 
@typescript-eslint/no-explicit-any + let cliExecutorModule: any; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let historyStoreModule: any; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let shellEscapeModule: any; + + function unrefSetTimeout( + fn: (...args: TArgs) => void, + delay?: number, + ...args: TArgs + ): ReturnType { + const t = originalSetTimeout(fn as (...args: unknown[]) => void, delay as number, ...args); + (t as unknown as { unref?: () => void }).unref?.(); + return t; + } + + before(async () => { + envSnapshot.CCW_DATA_DIR = process.env.CCW_DATA_DIR; + envSnapshot.DEBUG = process.env.DEBUG; + envSnapshot.CCW_DEBUG = process.env.CCW_DEBUG; + + ccwHome = mkdtempSync(join(tmpdir(), 'ccw-command-injection-home-')); + projectDir = mkdtempSync(join(tmpdir(), 'ccw-command-injection-project-')); + process.env.CCW_DATA_DIR = ccwHome; + delete process.env.DEBUG; + delete process.env.CCW_DEBUG; + + // Prevent long-lived timeouts in the module under test from delaying process exit. + globalThis.setTimeout = unrefSetTimeout as unknown as typeof setTimeout; + + shellEscapeModule = await import(shellEscapeUrl); + + // Patch child_process.spawn BEFORE importing cli-executor (it captures spawn at module init). + childProcess.spawn = (command: unknown, args: unknown[], options: Record) => { + const cmd = String(command); + const argv = Array.isArray(args) ? 
args.map((a) => String(a)) : []; + spawnCalls.push({ command: cmd, args: argv, options: options || {} }); + + const child = new EventEmitter() as any; + child.pid = 4242; + child.killed = false; + child.stdin = new PassThrough(); + child.stdout = new PassThrough(); + child.stderr = new PassThrough(); + + let closed = false; + child.kill = () => { + child.killed = true; + if (!closed) { + closed = true; + child.stdout.end(); + child.stderr.end(); + child.emit('close', 0); + } + return true; + }; + + process.nextTick(() => { + if (closed) return; + if (cmd === 'where' || cmd === 'which') { + const tool = argv[0] || 'tool'; + child.stdout.write(`C:\\\\fake\\\\${tool}.cmd\r\n`); + child.stdout.end(); + child.stderr.end(); + closed = true; + child.emit('close', 0); + return; + } + + child.stdout.write('ok\n'); + child.stdout.end(); + child.stderr.end(); + closed = true; + child.emit('close', 0); + }); + + return child; + }; + + cliExecutorModule = await import(cliExecutorUrl); + historyStoreModule = await import(historyStoreUrl); + }); + + after(async () => { + childProcess.spawn = originalSpawn; + globalThis.setTimeout = originalSetTimeout; + + try { + historyStoreModule?.closeAllStores?.(); + } catch { + // ignore + } + + if (projectDir) rmSync(projectDir, { recursive: true, force: true }); + if (ccwHome) rmSync(ccwHome, { recursive: true, force: true }); + + process.env.CCW_DATA_DIR = envSnapshot.CCW_DATA_DIR; + if (envSnapshot.DEBUG === undefined) delete process.env.DEBUG; + else process.env.DEBUG = envSnapshot.DEBUG; + if (envSnapshot.CCW_DEBUG === undefined) delete process.env.CCW_DEBUG; + else process.env.CCW_DEBUG = envSnapshot.CCW_DEBUG; + }); + + it('escapes dangerous metacharacters for Windows shell execution', async () => { + const escapeWindowsArg = shellEscapeModule.escapeWindowsArg as (arg: string) => string; + + const cases: Array<{ + name: string; + params: Record; + expectedCommand: string; + expectedArgs: string[]; + }> = [ + { + name: 'gemini: model 
includes &', + params: { tool: 'gemini', prompt: 'hi', cd: projectDir, id: 'case-gemini-model-amp', model: 'gpt-4 & calc' }, + expectedCommand: 'gemini', + expectedArgs: ['-m', 'gpt-4 & calc'], + }, + { + name: 'gemini: model includes |', + params: { tool: 'gemini', prompt: 'hi', cd: projectDir, id: 'case-gemini-model-pipe', model: 'gpt|calc' }, + expectedCommand: 'gemini', + expectedArgs: ['-m', 'gpt|calc'], + }, + { + name: 'gemini: model includes >', + params: { tool: 'gemini', prompt: 'hi', cd: projectDir, id: 'case-gemini-model-gt', model: 'gpt>out.txt' }, + expectedCommand: 'gemini', + expectedArgs: ['-m', 'gpt>out.txt'], + }, + { + name: 'gemini: model includes <', + params: { tool: 'gemini', prompt: 'hi', cd: projectDir, id: 'case-gemini-model-lt', model: 'gpt c.command === testCase.expectedCommand); + assert.ok(execCall, `Expected spawn call for ${testCase.expectedCommand} (${testCase.name})`); + + assert.equal( + execCall.options?.shell, + isWindows, + `Expected shell=${String(isWindows)} for ${testCase.expectedCommand} (${testCase.name})` + ); + + const expectedCommand = isWindows ? escapeWindowsArg(testCase.expectedCommand) : testCase.expectedCommand; + const expectedArgs = isWindows ? testCase.expectedArgs.map(escapeWindowsArg) : testCase.expectedArgs; + + assert.equal(execCall.command, expectedCommand, `spawn command (${testCase.name})`); + assert.deepEqual(execCall.args, expectedArgs, `spawn args (${testCase.name})`); + } + }); +}); diff --git a/ccw/tests/security/credential-handling.test.ts b/ccw/tests/security/credential-handling.test.ts new file mode 100644 index 00000000..a039979d --- /dev/null +++ b/ccw/tests/security/credential-handling.test.ts @@ -0,0 +1,447 @@ +/** + * Security tests for credential handling (DSC-004). + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Uses an isolated CCW data directory (CCW_DATA_DIR) to avoid touching real user config. 
+ */ + +import { after, afterEach, before, beforeEach, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import http from 'node:http'; +import { mkdtempSync, mkdirSync, readFileSync, readdirSync, rmSync, statSync, writeFileSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import path from 'node:path'; + +const CCW_HOME = mkdtempSync(path.join(tmpdir(), 'ccw-credential-tests-home-')); +const PROJECT_ROOT = mkdtempSync(path.join(tmpdir(), 'ccw-credential-tests-project-')); +const CONFIG_DIR = path.join(CCW_HOME, 'config'); +const CONFIG_PATH = path.join(CONFIG_DIR, 'litellm-api-config.json'); + +const originalEnv = { + CCW_DATA_DIR: process.env.CCW_DATA_DIR, + TEST_API_KEY: process.env.TEST_API_KEY, +}; + +process.env.CCW_DATA_DIR = CCW_HOME; + +const configManagerUrl = new URL('../../dist/config/litellm-api-config-manager.js', import.meta.url); +configManagerUrl.searchParams.set('t', String(Date.now())); + +const litellmRoutesUrl = new URL('../../dist/core/routes/litellm-api-routes.js', import.meta.url); +litellmRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let routes: any; + +type JsonResponse = { status: number; json: any; text: string }; + +async function requestJson(baseUrl: string, method: string, reqPath: string, body?: unknown): Promise { + const url = new URL(reqPath, baseUrl); + const payload = body === undefined ? null : Buffer.from(JSON.stringify(body), 'utf8'); + + return new Promise((resolve, reject) => { + const req = http.request( + url, + { + method, + headers: { + Accept: 'application/json', + ...(payload ? 
{ 'Content-Type': 'application/json', 'Content-Length': String(payload.length) } : {}), + }, + }, + (res) => { + let responseBody = ''; + res.on('data', (chunk) => { + responseBody += chunk.toString(); + }); + res.on('end', () => { + let json: any = null; + try { + json = responseBody ? JSON.parse(responseBody) : null; + } catch { + json = null; + } + resolve({ status: res.statusCode || 0, json, text: responseBody }); + }); + }, + ); + req.on('error', reject); + if (payload) req.write(payload); + req.end(); + }); +} + +function handlePostRequest( + req: http.IncomingMessage, + res: http.ServerResponse, + handler: (body: unknown) => Promise, +): void { + let body = ''; + req.on('data', (chunk) => { + body += chunk.toString(); + }); + req.on('end', async () => { + try { + const parsed = body ? JSON.parse(body) : {}; + const result = await handler(parsed); + + if (result?.error) { + res.writeHead(result.status || 500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: result.error })); + } else { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(result)); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); +} + +async function createServer(initialPath: string): Promise<{ server: http.Server; baseUrl: string }> { + const server = http.createServer(async (req, res) => { + const url = new URL(req.url || '/', 'http://localhost'); + const pathname = url.pathname; + + const ctx = { + pathname, + url, + req, + res, + initialPath, + handlePostRequest, + broadcastToClients() {}, + }; + + try { + const handled = await routes.handleLiteLLMApiRoutes(ctx); + if (!handled) { + res.writeHead(404, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Not Found' })); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 
err?.message || String(err) })); + } + }); + + await new Promise((resolve) => server.listen(0, () => resolve())); + const addr = server.address(); + const port = typeof addr === 'object' && addr ? addr.port : 0; + return { server, baseUrl: `http://127.0.0.1:${port}` }; +} + +function loadMaskApiKey(): (apiKey: string) => string { + const filePath = new URL('../../src/templates/dashboard-js/views/api-settings.js', import.meta.url); + const source = readFileSync(filePath, 'utf8'); + + const match = source.match(/function\s+maskApiKey\(apiKey\)\s*\{[\s\S]*?\r?\n\}/); + if (!match) { + throw new Error('maskApiKey function not found in api-settings.js'); + } + + // eslint-disable-next-line no-new-func + const fn = new Function(`${match[0]}; return maskApiKey;`) as () => (apiKey: string) => string; + return fn(); +} + +describe('security: credential handling', async () => { + const maskApiKey = loadMaskApiKey(); + + function listFilesRecursive(dirPath: string): string[] { + const results: string[] = []; + const entries = readdirSync(dirPath, { withFileTypes: true }); + for (const entry of entries) { + const fullPath = path.join(dirPath, entry.name); + if (entry.isDirectory()) results.push(...listFilesRecursive(fullPath)); + else if (entry.isFile()) results.push(fullPath); + } + return results; + } + + before(async () => { + mod = await import(configManagerUrl.href); + routes = await import(litellmRoutesUrl.href); + }); + + beforeEach(() => { + process.env.TEST_API_KEY = originalEnv.TEST_API_KEY; + rmSync(CONFIG_PATH, { force: true }); + }); + + afterEach(() => { + mock.restoreAll(); + }); + + after(() => { + process.env.CCW_DATA_DIR = originalEnv.CCW_DATA_DIR; + process.env.TEST_API_KEY = originalEnv.TEST_API_KEY; + rmSync(CCW_HOME, { recursive: true, force: true }); + rmSync(PROJECT_ROOT, { recursive: true, force: true }); + }); + + it('resolveEnvVar returns input unchanged when not ${ENV_VAR}', () => { + assert.equal(mod.resolveEnvVar('sk-test-1234'), 'sk-test-1234'); 
+ assert.equal(mod.resolveEnvVar(''), ''); + }); + + it('resolveEnvVar resolves ${ENV_VAR} syntax', () => { + process.env.TEST_API_KEY = 'sk-test-resolved'; + assert.equal(mod.resolveEnvVar('${TEST_API_KEY}'), 'sk-test-resolved'); + }); + + it('resolveEnvVar returns empty string when env var is missing', () => { + delete process.env.TEST_API_KEY; + assert.equal(mod.resolveEnvVar('${TEST_API_KEY}'), ''); + }); + + it('getProviderWithResolvedEnvVars returns provider with resolvedApiKey', () => { + process.env.TEST_API_KEY = 'sk-test-resolved'; + + const provider = mod.addProvider(PROJECT_ROOT, { + name: 'Test Provider', + type: 'openai', + apiKey: '${TEST_API_KEY}', + apiBase: undefined, + enabled: true, + }); + + const resolved = mod.getProviderWithResolvedEnvVars(PROJECT_ROOT, provider.id); + assert.ok(resolved); + assert.equal(resolved.id, provider.id); + assert.equal(resolved.resolvedApiKey, 'sk-test-resolved'); + }); + + it('resolveEnvVar does not log resolved credential values', () => { + const secret = 'sk-test-secret-1234567890'; + process.env.TEST_API_KEY = secret; + + const calls: string[] = []; + mock.method(console, 'log', (...args: unknown[]) => calls.push(args.map(String).join(' '))); + mock.method(console, 'error', (...args: unknown[]) => calls.push(args.map(String).join(' '))); + + assert.equal(mod.resolveEnvVar('${TEST_API_KEY}'), secret); + assert.equal(calls.some((line) => line.includes(secret)), false); + }); + + it('getProviderWithResolvedEnvVars does not log resolved credential values', () => { + const secret = 'sk-test-secret-abcdef123456'; + process.env.TEST_API_KEY = secret; + + const calls: string[] = []; + mock.method(console, 'log', (...args: unknown[]) => calls.push(args.map(String).join(' '))); + mock.method(console, 'error', (...args: unknown[]) => calls.push(args.map(String).join(' '))); + + const provider = mod.addProvider(PROJECT_ROOT, { + name: 'Test Provider', + type: 'openai', + apiKey: '${TEST_API_KEY}', + apiBase: undefined, + 
enabled: true, + }); + + const resolved = mod.getProviderWithResolvedEnvVars(PROJECT_ROOT, provider.id); + assert.ok(resolved); + assert.equal(resolved.resolvedApiKey, secret); + assert.equal(calls.some((line) => line.includes(secret)), false); + }); + + it('loadLiteLLMApiConfig logs parse errors without leaking credentials', () => { + const secret = 'sk-test-secret-in-file-1234'; + mkdirSync(CONFIG_DIR, { recursive: true }); + writeFileSync(CONFIG_PATH, `{\"providers\":[{\"apiKey\":\"${secret}\"`, 'utf8'); + + const calls: string[] = []; + mock.method(console, 'error', (...args: unknown[]) => calls.push(args.map(String).join(' '))); + + const config = mod.loadLiteLLMApiConfig(PROJECT_ROOT); + assert.equal(Array.isArray(config.providers), true); + assert.equal(config.providers.length, 0); + assert.equal(calls.length > 0, true); + assert.equal(calls.some((line) => line.includes(secret)), false); + }); + + it('loadLiteLLMApiConfig stack traces do not include raw credentials', () => { + const secret = 'sk-test-secret-stack-9999'; + mkdirSync(CONFIG_DIR, { recursive: true }); + writeFileSync(CONFIG_PATH, `{\"providers\":[{\"apiKey\":\"${secret}\"`, 'utf8'); + + const errorArgs: unknown[][] = []; + mock.method(console, 'error', (...args: unknown[]) => errorArgs.push(args)); + + mod.loadLiteLLMApiConfig(PROJECT_ROOT); + + const errorObj = errorArgs.flat().find((arg) => arg instanceof Error) as Error | undefined; + assert.ok(errorObj); + assert.equal(String(errorObj.stack ?? 
'').includes(secret), false); + }); + + it('maskApiKey hides raw keys but keeps env var references readable', () => { + assert.equal(maskApiKey(''), ''); + assert.equal(maskApiKey('${TEST_API_KEY}'), '${TEST_API_KEY}'); + assert.equal(maskApiKey('short'), '***'); + assert.equal(maskApiKey('sk-test-1234567890'), 'sk-t...7890'); + }); + + it('getProviderWithResolvedEnvVars is safe to stringify (no env var syntax or resolved secrets)', () => { + const secret = 'sk-test-secret-json-0000'; + process.env.TEST_API_KEY = secret; + + const provider = mod.addProvider(PROJECT_ROOT, { + name: 'Test Provider', + type: 'openai', + apiKey: '${TEST_API_KEY}', + apiBase: undefined, + enabled: true, + }); + + const resolved = mod.getProviderWithResolvedEnvVars(PROJECT_ROOT, provider.id); + assert.ok(resolved); + + const payload = JSON.stringify(resolved); + assert.equal(payload.includes(secret), false); + assert.equal(payload.includes('${TEST_API_KEY}'), false); + assert.equal(payload.includes('resolvedApiKey'), false); + }); + + it('API responses do not expose env var syntax for provider apiKey', async () => { + process.env.TEST_API_KEY = 'sk-test-secret-api-1111'; + + mod.addProvider(PROJECT_ROOT, { + name: 'Test Provider', + type: 'openai', + apiKey: '${TEST_API_KEY}', + apiBase: undefined, + enabled: true, + }); + + const { server, baseUrl } = await createServer(PROJECT_ROOT); + try { + const res = await requestJson(baseUrl, 'GET', '/api/litellm-api/providers'); + assert.equal(res.status, 200); + assert.ok(res.json?.providers); + + assert.equal(res.text.includes('${TEST_API_KEY}'), false); + assert.equal(res.text.includes('${'), false); + } finally { + await new Promise((resolve) => server.close(() => resolve())); + } + }); + + it('API responses do not expose resolved secrets in generated rotation endpoints', async () => { + const secret = 'sk-test-secret-rotation-2222'; + process.env.TEST_API_KEY = secret; + + const provider = mod.addProvider(PROJECT_ROOT, { + name: 'Embed 
Provider', + type: 'openai', + apiKey: '${TEST_API_KEY}', + apiBase: undefined, + enabled: true, + }); + + // Ensure provider has an enabled embedding model. + mod.updateProvider(PROJECT_ROOT, provider.id, { + embeddingModels: [{ + id: 'emb-1', + name: 'text-embedding-test', + type: 'embedding', + series: 'Test', + enabled: true, + }], + }); + + // Configure legacy rotation directly in the config file (avoid auto-sync side effects). + mkdirSync(CONFIG_DIR, { recursive: true }); + const config = mod.loadLiteLLMApiConfig(PROJECT_ROOT); + config.codexlensEmbeddingRotation = { + enabled: true, + strategy: 'round_robin', + defaultCooldown: 60, + targetModel: 'text-embedding-test', + providers: [{ + providerId: provider.id, + modelId: 'emb-1', + useAllKeys: true, + weight: 1.0, + maxConcurrentPerKey: 4, + enabled: true, + }], + }; + writeFileSync(CONFIG_PATH, JSON.stringify(config, null, 2), 'utf8'); + + const { server, baseUrl } = await createServer(PROJECT_ROOT); + try { + const res = await requestJson(baseUrl, 'GET', '/api/litellm-api/codexlens/rotation/endpoints'); + assert.equal(res.status, 200); + assert.ok(res.json?.endpoints); + + assert.equal(res.text.includes(secret), false); + assert.equal(res.text.includes('${TEST_API_KEY}'), false); + assert.equal(res.text.includes('${'), false); + } finally { + await new Promise((resolve) => server.close(() => resolve())); + } + }); + + it('stores env var references without persisting resolved secrets when available', () => { + const secret = 'sk-test-secret-storage-3333'; + process.env.TEST_API_KEY = secret; + + mod.addProvider(PROJECT_ROOT, { + name: 'Stored Provider', + type: 'openai', + apiKey: '${TEST_API_KEY}', + apiBase: undefined, + enabled: true, + }); + + const content = readFileSync(CONFIG_PATH, 'utf8'); + assert.equal(content.includes('${TEST_API_KEY}'), true); + assert.equal(content.includes(secret), false); + }); + + it('does not write resolved secrets into ancillary files under CCW_DATA_DIR', () => { + const 
secret = 'sk-test-secret-storage-scan-4444'; + process.env.TEST_API_KEY = secret; + + mod.addProvider(PROJECT_ROOT, { + name: 'Stored Provider', + type: 'openai', + apiKey: '${TEST_API_KEY}', + apiBase: undefined, + enabled: true, + }); + + const files = listFilesRecursive(CCW_HOME); + assert.ok(files.length > 0); + + for (const filePath of files) { + const content = readFileSync(filePath, 'utf8'); + assert.equal(content.includes(secret), false); + } + }); + + it('writes config file with restrictive permissions where supported', () => { + mod.addProvider(PROJECT_ROOT, { + name: 'Perms Provider', + type: 'openai', + apiKey: 'sk-test-raw-key', + apiBase: undefined, + enabled: true, + }); + + const stat = statSync(CONFIG_PATH); + assert.equal(stat.isFile(), true); + + if (process.platform === 'win32') return; + + // Require no permissions for group/others (0600). + const mode = stat.mode & 0o777; + assert.equal(mode & 0o077, 0); + }); +}); diff --git a/ccw/tests/security/csrf.test.ts b/ccw/tests/security/csrf.test.ts new file mode 100644 index 00000000..2a931103 --- /dev/null +++ b/ccw/tests/security/csrf.test.ts @@ -0,0 +1,294 @@ +/** + * Security regression tests for CSRF protection (DSC-006). 
+ * + * Verifies: + * - State-changing API routes require a valid CSRF token (cookie/header/body) + * - Tokens are single-use and session-bound + * - CORS rejects non-localhost origins (browser-enforced via mismatched Allow-Origin) + * - Development bypass flag disables CSRF validation + */ + +import { after, before, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import http from 'node:http'; +import { mkdtempSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +type HttpResult = { + status: number; + body: string; + headers: http.IncomingHttpHeaders; +}; + +function httpRequest(options: http.RequestOptions, body?: string, timeout = 10000): Promise { + return new Promise((resolve, reject) => { + const req = http.request(options, (res) => { + let data = ''; + res.on('data', chunk => data += chunk); + res.on('end', () => resolve({ status: res.statusCode || 0, body: data, headers: res.headers })); + }); + req.on('error', reject); + req.setTimeout(timeout, () => { + req.destroy(); + reject(new Error('Request timeout')); + }); + if (body) req.write(body); + req.end(); + }); +} + +function updateCookieJar(jar: Record, setCookie: string | string[] | undefined): void { + if (!setCookie) return; + const cookies = Array.isArray(setCookie) ? 
setCookie : [setCookie]; + for (const cookie of cookies) { + const pair = cookie.split(';')[0]?.trim(); + if (!pair) continue; + const [name, ...valueParts] = pair.split('='); + jar[name] = valueParts.join('='); + } +} + +function cookieHeader(jar: Record): string { + return Object.entries(jar) + .map(([name, value]) => `${name}=${value}`) + .join('; '); +} + +function cloneJar(jar: Record): Record { + return { ...jar }; +} + +async function getDashboardSession(port: number): Promise<{ jar: Record; csrfHeader: string | null }> { + const jar: Record = {}; + const res = await httpRequest({ hostname: '127.0.0.1', port, path: '/', method: 'GET' }); + updateCookieJar(jar, res.headers['set-cookie']); + return { jar, csrfHeader: typeof res.headers['x-csrf-token'] === 'string' ? res.headers['x-csrf-token'] : null }; +} + +async function postNotify(port: number, jar: Record, extraHeaders?: Record, body?: unknown): Promise { + const payload = body === undefined ? { type: 'REFRESH_REQUIRED', scope: 'all' } : body; + const encoded = JSON.stringify(payload); + return httpRequest( + { + hostname: '127.0.0.1', + port, + path: '/api/system/notify', + method: 'POST', + headers: { + 'Content-Type': 'application/json', + ...(Object.keys(jar).length ? { Cookie: cookieHeader(jar) } : {}), + ...(extraHeaders ?? 
{}), + }, + }, + encoded, + ); +} + +const ORIGINAL_ENV = { ...process.env }; +const serverUrl = new URL('../../dist/core/server.js', import.meta.url).href; +const csrfManagerUrl = new URL('../../dist/core/auth/csrf-manager.js', import.meta.url).href; + +describe('security: CSRF protection', async () => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let serverMod: any; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let csrfMod: any; + + let server: http.Server; + let port: number; + let projectRoot: string; + let ccwHome: string; + + before(async () => { + projectRoot = mkdtempSync(join(tmpdir(), 'ccw-csrf-project-')); + ccwHome = mkdtempSync(join(tmpdir(), 'ccw-csrf-home-')); + + process.env = { ...ORIGINAL_ENV, CCW_DATA_DIR: ccwHome }; + + serverMod = await import(serverUrl); + csrfMod = await import(csrfManagerUrl); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + server = await serverMod.startServer({ initialPath: projectRoot, port: 0 }); + const addr = server.address(); + port = typeof addr === 'object' && addr ? 
addr.port : 0; + assert.ok(port > 0, 'Server should start on a valid port'); + }); + + after(async () => { + await new Promise((resolve) => server.close(() => resolve())); + mock.restoreAll(); + process.env = ORIGINAL_ENV; + rmSync(projectRoot, { recursive: true, force: true }); + rmSync(ccwHome, { recursive: true, force: true }); + }); + + it('blocks POST requests without CSRF token', async () => { + const { jar } = await getDashboardSession(port); + delete jar['XSRF-TOKEN']; + + const res = await postNotify(port, jar); + assert.equal(res.status, 403); + assert.ok(res.body.includes('CSRF validation failed')); + }); + + it('blocks POST requests with forged CSRF token', async () => { + const { jar } = await getDashboardSession(port); + jar['XSRF-TOKEN'] = 'forged-token'; + + const res = await postNotify(port, jar); + assert.equal(res.status, 403); + }); + + it('blocks expired CSRF tokens', async () => { + csrfMod.resetCsrfTokenManager(); + csrfMod.getCsrfTokenManager({ tokenTtlMs: 1, cleanupIntervalMs: 0 }); + + const { jar } = await getDashboardSession(port); + await new Promise(resolve => setTimeout(resolve, 10)); + + const res = await postNotify(port, jar); + assert.equal(res.status, 403); + + csrfMod.resetCsrfTokenManager(); + }); + + it('blocks token reuse (single-use tokens)', async () => { + const { jar } = await getDashboardSession(port); + const oldToken = jar['XSRF-TOKEN']; + + const first = await postNotify(port, jar); + assert.equal(first.status, 200); + updateCookieJar(jar, first.headers['set-cookie']); + + // Try again using the old token explicitly (should fail). 
+ const reuseJar = cloneJar(jar); + reuseJar['XSRF-TOKEN'] = oldToken; + const secondUse = await postNotify(port, reuseJar); + assert.equal(secondUse.status, 403); + }); + + it('blocks CSRF token theft across sessions', async () => { + const sessionA = await getDashboardSession(port); + const sessionB = await getDashboardSession(port); + + const jar = cloneJar(sessionB.jar); + jar['XSRF-TOKEN'] = sessionA.jar['XSRF-TOKEN']; + + const res = await postNotify(port, jar); + assert.equal(res.status, 403); + }); + + it('does not require CSRF on GET requests', async () => { + const { jar } = await getDashboardSession(port); + const res = await httpRequest({ + hostname: '127.0.0.1', + port, + path: '/api/health', + method: 'GET', + headers: { Cookie: cookieHeader(jar) }, + }); + assert.equal(res.status, 200); + }); + + it('accepts CSRF token provided via cookie (legitimate flow)', async () => { + const { jar } = await getDashboardSession(port); + const res = await postNotify(port, jar); + assert.equal(res.status, 200); + }); + + it('accepts CSRF token provided via header', async () => { + const { jar } = await getDashboardSession(port); + const token = jar['XSRF-TOKEN']; + delete jar['XSRF-TOKEN']; + + const res = await postNotify(port, jar, { 'X-CSRF-Token': token }); + assert.equal(res.status, 200); + }); + + it('accepts CSRF token provided via request body', async () => { + const { jar } = await getDashboardSession(port); + const token = jar['XSRF-TOKEN']; + delete jar['XSRF-TOKEN']; + + const res = await postNotify(port, jar, undefined, { type: 'REFRESH_REQUIRED', scope: 'all', csrfToken: token }); + assert.equal(res.status, 200); + }); + + it('rotates CSRF token after successful POST', async () => { + const { jar } = await getDashboardSession(port); + const firstToken = jar['XSRF-TOKEN']; + + const res = await postNotify(port, jar); + assert.equal(res.status, 200); + updateCookieJar(jar, res.headers['set-cookie']); + + assert.notEqual(jar['XSRF-TOKEN'], firstToken); + 
}); + + it('allows localhost origins and rejects external origins (CORS)', async () => { + const allowedOrigin = `http://localhost:${port}`; + const allowed = await httpRequest({ + hostname: '127.0.0.1', + port, + path: '/api/health', + method: 'GET', + headers: { Origin: allowedOrigin }, + }); + assert.equal(allowed.headers['access-control-allow-origin'], allowedOrigin); + assert.equal(allowed.headers['vary'], 'Origin'); + + const evilOrigin = 'http://evil.com'; + const denied = await httpRequest({ + hostname: '127.0.0.1', + port, + path: '/api/health', + method: 'GET', + headers: { Origin: evilOrigin }, + }); + assert.notEqual(denied.headers['access-control-allow-origin'], evilOrigin); + assert.equal(denied.headers['access-control-allow-origin'], `http://localhost:${port}`); + }); + + it('bypasses CSRF validation when CCW_DISABLE_CSRF=true', async () => { + process.env.CCW_DISABLE_CSRF = 'true'; + const { jar } = await getDashboardSession(port); + delete jar['XSRF-TOKEN']; + + const res = await postNotify(port, jar); + assert.equal(res.status, 200); + + delete process.env.CCW_DISABLE_CSRF; + }); + + it('skips CSRF validation for Authorization header auth', async () => { + const tokenRes = await httpRequest({ + hostname: '127.0.0.1', + port, + path: '/api/auth/token', + method: 'GET', + }); + + const parsed = JSON.parse(tokenRes.body) as { token: string }; + assert.ok(parsed.token); + + const res = await httpRequest( + { + hostname: '127.0.0.1', + port, + path: '/api/system/notify', + method: 'POST', + headers: { + Authorization: `Bearer ${parsed.token}`, + 'Content-Type': 'application/json', + }, + }, + JSON.stringify({ type: 'REFRESH_REQUIRED', scope: 'all' }), + ); + + assert.equal(res.status, 200); + }); +}); diff --git a/ccw/tests/security/path-traversal.test.ts b/ccw/tests/security/path-traversal.test.ts new file mode 100644 index 00000000..5d24e84c --- /dev/null +++ b/ccw/tests/security/path-traversal.test.ts @@ -0,0 +1,225 @@ +/** + * Regression tests for 
path traversal protections (DSC-005). + * + * Focus: + * - Allowlist enforcement + boundary checks (no "/allowedness" bypass) + * - Symlink target re-validation via realpath + * - Non-existent path handling via parent-directory validation + * + * Notes: + * - Targets runtime implementation shipped in `ccw/dist`. + * - Uses stubbed fs + fs/promises to avoid touching real filesystem. + */ + +import { after, before, beforeEach, describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import path from 'node:path'; +import { createRequire } from 'node:module'; + +const require = createRequire(import.meta.url); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const fsp = require('node:fs/promises') as typeof import('node:fs/promises'); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const fs = require('node:fs') as typeof import('node:fs'); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const os = require('node:os') as typeof import('node:os'); + +const pathValidatorUrl = new URL('../../dist/utils/path-validator.js', import.meta.url); +pathValidatorUrl.searchParams.set('t', String(Date.now())); + +const pathResolverUrl = new URL('../../dist/utils/path-resolver.js', import.meta.url); +pathResolverUrl.searchParams.set('t', String(Date.now())); + +const ORIGINAL_ENV = { ...process.env }; + +function resetEnv(): void { + for (const key of Object.keys(process.env)) { + if (!(key in ORIGINAL_ENV)) delete process.env[key]; + } + for (const [key, value] of Object.entries(ORIGINAL_ENV)) { + process.env[key] = value; + } +} + +function enoent(message: string): Error & { code: string } { + const err = new Error(message) as Error & { code: string }; + err.code = 'ENOENT'; + return err; +} + +type RealpathPlan = Map; +const realpathPlan: RealpathPlan = new Map(); +const realpathCalls: string[] = []; + +const originalRealpath = fsp.realpath; +fsp.realpath = (async (p: string) => { + realpathCalls.push(p); + const 
planned = realpathPlan.get(p); + if (!planned) { + throw enoent(`ENOENT: no such file or directory, realpath '${p}'`); + } + if (planned.type === 'throw') throw planned.error; + return planned.value; +}) as any; + +type FsState = { + existing: Set; + realpaths: Map; +}; + +const fsState: FsState = { + existing: new Set(), + realpaths: new Map(), +}; + +function key(filePath: string): string { + return path.resolve(filePath).replace(/\\/g, '/').toLowerCase(); +} + +function setExists(filePath: string, exists: boolean): void { + const normalized = key(filePath); + if (exists) fsState.existing.add(normalized); + else fsState.existing.delete(normalized); +} + +function setRealpath(filePath: string, realPath: string): void { + fsState.realpaths.set(key(filePath), realPath); +} + +const originalFs = { + existsSync: fs.existsSync, + realpathSync: fs.realpathSync, +}; + +fs.existsSync = ((filePath: string) => fsState.existing.has(key(filePath))) as any; +fs.realpathSync = ((filePath: string) => { + const mapped = fsState.realpaths.get(key(filePath)); + return mapped ?? filePath; +}) as any; + +const originalHomedir = os.homedir; +const TEST_HOME = path.join(process.cwd(), '.tmp-ccw-security-home'); +os.homedir = () => TEST_HOME; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let pathValidator: any; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let pathResolver: any; + +describe('security: path traversal regression', async () => { + const isWindows = process.platform === 'win32'; + const allowedRoot = isWindows ? 'C:\\allowed' : '/allowed'; + const disallowedRoot = isWindows ? 
'C:\\secret' : '/secret'; + + before(async () => { + pathValidator = await import(pathValidatorUrl.href); + pathResolver = await import(pathResolverUrl.href); + }); + + beforeEach(() => { + realpathCalls.length = 0; + realpathPlan.clear(); + fsState.existing.clear(); + fsState.realpaths.clear(); + resetEnv(); + }); + + it('path-validator rejects traversal/absolute escapes before realpath', async () => { + process.env.CCW_PROJECT_ROOT = allowedRoot; + const allowedDirectories = [allowedRoot]; + + const vectors: Array<{ name: string; input: string }> = [ + { name: 'absolute outside allowlist', input: path.join(disallowedRoot, 'secret.txt') }, + { name: 'allowed prefix but different dir (allowedness)', input: `${allowedRoot}ness${isWindows ? '\\\\' : '/'}file.txt` }, + { name: 'allowed prefix but different dir (allowed-evil)', input: `${allowedRoot}-evil${isWindows ? '\\\\' : '/'}file.txt` }, + { name: 'absolute contains .. segment escaping allowlist', input: `${allowedRoot}${isWindows ? '\\\\' : '/'}..${isWindows ? '\\\\' : '/'}secret.txt` }, + { name: 'absolute multi-.. escaping allowlist', input: `${allowedRoot}${isWindows ? '\\\\' : '/'}sub${isWindows ? '\\\\' : '/'}..${isWindows ? '\\\\' : '/'}..${isWindows ? '\\\\' : '/'}secret.txt` }, + { name: 'relative traversal one level', input: `..${isWindows ? '\\\\' : '/'}secret.txt` }, + { name: 'relative traversal two levels', input: `..${isWindows ? '\\\\' : '/'}..${isWindows ? '\\\\' : '/'}secret.txt` }, + { name: 'mixed separators traversal', input: `sub${isWindows ? '/' : '/'}..${isWindows ? '\\\\' : '/'}..${isWindows ? 
'\\\\' : '/'}secret.txt` }, + { name: 'posix absolute escape', input: '/etc/passwd' }, + { name: 'encoded traversal (decoded once)', input: decodeURIComponent('%2e%2e%2f%2e%2e%2fetc%2fpasswd') }, + { name: 'double-encoded traversal (decoded twice)', input: decodeURIComponent(decodeURIComponent('%252e%252e%252f%252e%252e%252fetc%252fpasswd')) }, + { name: 'leading dot traversal', input: `.${isWindows ? '\\\\' : '/'}..${isWindows ? '\\\\' : '/'}secret.txt` }, + { name: 'nested traversal escape', input: 'sub/../../secret.txt' }, + { name: 'alt-drive absolute escape', input: isWindows ? 'D:\\\\secret\\\\file.txt' : '/var/secret/file.txt' }, + { name: 'UNC/extended path escape', input: isWindows ? '\\\\\\\\?\\\\C:\\\\secret\\\\file.txt' : '/private/secret/file.txt' }, + ]; + + for (const vector of vectors) { + await assert.rejects( + pathValidator.validatePath(vector.input, { allowedDirectories }), + (err: any) => err instanceof Error && err.message.includes('Access denied: path'), + vector.name, + ); + } + + assert.deepEqual(realpathCalls, []); + }); + + it('path-validator enforces directory-boundary allowlists', async () => { + process.env.CCW_PROJECT_ROOT = allowedRoot; + const allowedDirectories = [path.join(allowedRoot, 'dir')]; + + await assert.rejects( + pathValidator.validatePath(path.join(allowedRoot, 'dir-malicious', 'file.txt'), { allowedDirectories }), + (err: any) => err instanceof Error && err.message.includes('Access denied: path'), + ); + + const okPath = path.join(allowedRoot, 'dir', 'file.txt'); + const resolvedOk = await pathValidator.validatePath(okPath, { allowedDirectories }); + assert.equal(pathValidator.isPathWithinAllowedDirectories(resolvedOk, allowedDirectories), true); + }); + + it('path-validator rejects symlink targets outside allowlist', async () => { + const linkPath = path.join(allowedRoot, 'link.txt'); + realpathPlan.set(linkPath, { type: 'return', value: path.join(disallowedRoot, 'target.txt') }); + + await assert.rejects( + 
pathValidator.validatePath(linkPath, { allowedDirectories: [allowedRoot] }), + (err: any) => err instanceof Error && err.message.includes('symlink target'), + ); + }); + + it('path-validator rejects non-existent paths when the parent resolves outside allowlist', async () => { + const linkDir = path.join(allowedRoot, 'linkdir'); + const newFile = path.join(linkDir, 'newfile.txt'); + + realpathPlan.set(newFile, { type: 'throw', error: enoent('missing') }); + realpathPlan.set(linkDir, { type: 'return', value: disallowedRoot }); + + await assert.rejects( + pathValidator.validatePath(newFile, { allowedDirectories: [allowedRoot] }), + (err: any) => err instanceof Error && err.message.includes('parent directory'), + ); + }); + + it('path-resolver validates baseDir before and after symlink resolution', () => { + const baseDir = allowedRoot; + setExists(baseDir, true); + + const traversal = pathResolver.validatePath(`${baseDir}${isWindows ? '\\\\' : '/'}..${isWindows ? '\\\\' : '/'}secret`, { baseDir }); + assert.equal(traversal.valid, false); + assert.ok(traversal.error?.includes('Path must be within')); + + const linkPath = path.join(baseDir, 'link'); + setExists(linkPath, true); + setRealpath(linkPath, disallowedRoot); + const symlinkEscape = pathResolver.validatePath(linkPath, { baseDir }); + assert.equal(symlinkEscape.valid, false); + assert.ok(symlinkEscape.error?.includes('Path must be within')); + + setExists(linkPath, true); + const symlinkParentEscape = pathResolver.validatePath(path.join(linkPath, 'newfile.txt'), { baseDir }); + assert.equal(symlinkParentEscape.valid, false); + assert.ok(symlinkParentEscape.error?.includes('Path must be within')); + }); +}); + +after(() => { + fsp.realpath = originalRealpath; + fs.existsSync = originalFs.existsSync; + fs.realpathSync = originalFs.realpathSync; + os.homedir = originalHomedir; + resetEnv(); +}); diff --git a/ccw/tests/server-auth.integration.test.ts b/ccw/tests/server-auth.integration.test.ts new file mode 100644 
index 00000000..981ee878 --- /dev/null +++ b/ccw/tests/server-auth.integration.test.ts @@ -0,0 +1,151 @@ +/** + * Integration tests for server authentication flow. + * + * Verifies: + * - API routes require auth token + * - /api/auth/token returns token + cookie for localhost requests + * - Authorization header and cookie auth both work + */ + +import { after, before, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import http from 'node:http'; +import { mkdtempSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +type HttpResult = { + status: number; + body: string; + headers: http.IncomingHttpHeaders; +}; + +function httpRequest(options: http.RequestOptions, body?: string, timeout = 10000): Promise { + return new Promise((resolve, reject) => { + const req = http.request(options, (res) => { + let data = ''; + res.on('data', chunk => data += chunk); + res.on('end', () => resolve({ status: res.statusCode || 0, body: data, headers: res.headers })); + }); + req.on('error', reject); + req.setTimeout(timeout, () => { + req.destroy(); + reject(new Error('Request timeout')); + }); + if (body) req.write(body); + req.end(); + }); +} + +const ORIGINAL_ENV = { ...process.env }; +const serverUrl = new URL('../dist/core/server.js', import.meta.url); +serverUrl.searchParams.set('t', String(Date.now())); + +describe('server authentication integration', async () => { + let server: http.Server; + let port: number; + let projectRoot: string; + let ccwHome: string; + + before(async () => { + projectRoot = mkdtempSync(join(tmpdir(), 'ccw-auth-project-')); + ccwHome = mkdtempSync(join(tmpdir(), 'ccw-auth-home-')); + + process.env = { ...ORIGINAL_ENV, CCW_DATA_DIR: ccwHome }; + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const serverMod: any = await import(serverUrl.href); + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + server = await 
serverMod.startServer({ initialPath: projectRoot, port: 0 }); + const addr = server.address(); + port = typeof addr === 'object' && addr ? addr.port : 0; + assert.ok(port > 0, 'Server should start on a valid port'); + }); + + after(async () => { + await new Promise((resolve) => { + server.close(() => resolve()); + }); + mock.restoreAll(); + process.env = ORIGINAL_ENV; + rmSync(projectRoot, { recursive: true, force: true }); + rmSync(ccwHome, { recursive: true, force: true }); + }); + + it('rejects unauthenticated API requests with 401', async () => { + const response = await httpRequest({ + hostname: '127.0.0.1', + port, + path: '/api/health', + method: 'GET', + }); + + assert.equal(response.status, 401); + assert.ok(response.body.includes('Unauthorized')); + }); + + it('returns auth token and cookie for localhost requests', async () => { + const response = await httpRequest({ + hostname: '127.0.0.1', + port, + path: '/api/auth/token', + method: 'GET', + }); + + assert.equal(response.status, 200); + const data = JSON.parse(response.body) as { token: string; expiresAt: string }; + assert.ok(data.token); + assert.ok(data.expiresAt); + + const setCookie = response.headers['set-cookie']; + assert.ok(setCookie && setCookie.length > 0, 'Expected Set-Cookie header'); + }); + + it('accepts Authorization header on API routes', async () => { + const tokenResponse = await httpRequest({ + hostname: '127.0.0.1', + port, + path: '/api/auth/token', + method: 'GET', + }); + + const { token } = JSON.parse(tokenResponse.body) as { token: string }; + const response = await httpRequest({ + hostname: '127.0.0.1', + port, + path: '/api/health', + method: 'GET', + headers: { + Authorization: `Bearer ${token}`, + }, + }); + + assert.equal(response.status, 200); + }); + + it('accepts cookie auth on API routes', async () => { + const tokenResponse = await httpRequest({ + hostname: '127.0.0.1', + port, + path: '/api/auth/token', + method: 'GET', + }); + + const { token } = 
JSON.parse(tokenResponse.body) as { token: string }; + const response = await httpRequest({ + hostname: '127.0.0.1', + port, + path: '/api/health', + method: 'GET', + headers: { + Cookie: `auth_token=${encodeURIComponent(token)}`, + }, + }); + + assert.equal(response.status, 200); + }); +}); + diff --git a/ccw/tests/server.test.ts b/ccw/tests/server.test.ts new file mode 100644 index 00000000..50dbdbed --- /dev/null +++ b/ccw/tests/server.test.ts @@ -0,0 +1,98 @@ +/** + * Unit tests for server binding defaults and host option plumbing. + */ + +import { afterEach, before, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import http from 'node:http'; +import { mkdtempSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const ORIGINAL_ENV = { ...process.env }; + +const serverUrl = new URL('../dist/core/server.js', import.meta.url); +serverUrl.searchParams.set('t', String(Date.now())); + +const serveUrl = new URL('../dist/commands/serve.js', import.meta.url); +serveUrl.searchParams.set('t', String(Date.now())); + +describe('server binding', async () => { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let serverMod: any; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + let serveMod: any; + + before(async () => { + serverMod = await import(serverUrl.href); + serveMod = await import(serveUrl.href); + }); + + afterEach(() => { + mock.restoreAll(); + process.env = ORIGINAL_ENV; + }); + + it('binds to 127.0.0.1 by default', async () => { + const ccwHome = mkdtempSync(join(tmpdir(), 'ccw-server-bind-home-')); + process.env = { ...ORIGINAL_ENV, CCW_DATA_DIR: ccwHome }; + + const listenCalls: any[] = []; + const originalListen = http.Server.prototype.listen; + + mock.method(http.Server.prototype as any, 'listen', function (this: any, ...args: any[]) { + listenCalls.push(args); + return (originalListen as any).apply(this, args); + }); + + const server: 
http.Server = await serverMod.startServer({ initialPath: process.cwd(), port: 0 }); + await new Promise((resolve) => server.close(() => resolve())); + + rmSync(ccwHome, { recursive: true, force: true }); + + assert.ok(listenCalls.length > 0, 'Expected server.listen to be called'); + assert.equal(listenCalls[0][1], '127.0.0.1'); + }); + + it('passes host option through serve command', async () => { + const ccwHome = mkdtempSync(join(tmpdir(), 'ccw-serve-bind-home-')); + process.env = { ...ORIGINAL_ENV, CCW_DATA_DIR: ccwHome }; + + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + + let sigintHandler: (() => void) | null = null; + const originalOn = process.on.bind(process); + mock.method(process as any, 'on', (event: string, handler: any) => { + if (event === 'SIGINT') { + sigintHandler = handler; + return process; + } + return originalOn(event, handler); + }); + + const exitCodes: Array = []; + mock.method(process as any, 'exit', (code?: number) => { + exitCodes.push(code); + }); + + const listenCalls: any[] = []; + const originalListen = http.Server.prototype.listen; + mock.method(http.Server.prototype as any, 'listen', function (this: any, ...args: any[]) { + listenCalls.push(args); + return (originalListen as any).apply(this, args); + }); + + await serveMod.serveCommand({ port: 0, browser: false, path: process.cwd(), host: '0.0.0.0' }); + assert.ok(sigintHandler, 'Expected serveCommand to register SIGINT handler'); + + sigintHandler?.(); + await new Promise((resolve) => setTimeout(resolve, 300)); + + rmSync(ccwHome, { recursive: true, force: true }); + + assert.ok(exitCodes.includes(0)); + assert.ok(listenCalls.some((args) => args[1] === '0.0.0.0')); + }); +}); + diff --git a/ccw/tests/shell-escape.test.ts b/ccw/tests/shell-escape.test.ts new file mode 100644 index 00000000..dd7057d9 --- /dev/null +++ b/ccw/tests/shell-escape.test.ts @@ -0,0 +1,47 @@ +/** + * Unit tests for Windows cmd.exe argument escaping 
(ccw/dist/utils/shell-escape.js) + */ + +import { describe, it } from 'node:test'; +import assert from 'node:assert/strict'; + +const shellEscapeUrl = new URL('../dist/utils/shell-escape.js', import.meta.url).href; + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +describe('escapeWindowsArg', async () => { + mod = await import(shellEscapeUrl); + + it('escapes cmd.exe metacharacters with caret', () => { + const cases: Array<{ input: string; expected: string }> = [ + { input: 'arg|command', expected: 'arg^|command' }, + { input: 'arg&command', expected: 'arg^&command' }, + { input: 'arg&&command', expected: 'arg^&^&command' }, + { input: 'arg||command', expected: 'arg^|^|command' }, + { input: 'arg>out.txt', expected: 'arg^>out.txt' }, + { input: 'arg>>out.txt', expected: 'arg^>^>out.txt' }, + { input: 'arg<in.txt', expected: 'arg^<in.txt' }, + ]; + + // NOTE(review): the span between the last test case and the next `it(` was lost to HTML-tag stripping; reconstructed — verify against blob dd7057d9. + for (const { input, expected } of cases) { + assert.equal(mod.escapeWindowsArg(input), expected); + } + }); + + it('quotes arguments containing spaces', () => { + assert.equal(mod.escapeWindowsArg('hello world'), '"hello world"'); + assert.equal(mod.escapeWindowsArg('test & echo'), '"test ^& echo"'); + assert.equal(mod.escapeWindowsArg('a|b c'), '"a^|b c"'); + }); + + it('handles empty arguments', () => { + assert.equal(mod.escapeWindowsArg(''), '""'); + }); +}); +
+ */ + +import { after, before, describe, it, mock } from 'node:test'; +import assert from 'node:assert/strict'; +import http from 'node:http'; +import { mkdtempSync, rmSync } from 'node:fs'; +import { tmpdir } from 'node:os'; +import { join } from 'node:path'; + +const PROJECT_ROOT = mkdtempSync(join(tmpdir(), 'ccw-skills-routes-project-')); +const OUTSIDE_ROOT = mkdtempSync(join(tmpdir(), 'ccw-skills-routes-outside-')); + +const skillsRoutesUrl = new URL('../dist/core/routes/skills-routes.js', import.meta.url); +skillsRoutesUrl.searchParams.set('t', String(Date.now())); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +type JsonResponse = { status: number; json: any; text: string }; + +async function requestJson(baseUrl: string, method: string, path: string, body?: unknown): Promise<JsonResponse> { + const url = new URL(path, baseUrl); + const payload = body === undefined ? null : Buffer.from(JSON.stringify(body), 'utf8'); + + return new Promise((resolve, reject) => { + const req = http.request( + url, + { + method, + headers: { + Accept: 'application/json', + ...(payload ? { 'Content-Type': 'application/json', 'Content-Length': String(payload.length) } : {}), + }, + }, + (res) => { + let responseBody = ''; + res.on('data', (chunk) => { + responseBody += chunk.toString(); + }); + res.on('end', () => { + let json: any = null; + try { + json = responseBody ? JSON.parse(responseBody) : null; + } catch { + json = null; + } + resolve({ status: res.statusCode || 0, json, text: responseBody }); + }); + }, + ); + req.on('error', reject); + if (payload) req.write(payload); + req.end(); + }); +} + +function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: unknown) => Promise<any>): void { + let body = ''; + req.on('data', (chunk) => { + body += chunk.toString(); + }); + req.on('end', async () => { + try { + const parsed = body ?
JSON.parse(body) : {}; + const result = await handler(parsed); + + if (result?.error) { + res.writeHead(result.status || 500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: result.error })); + } else { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify(result)); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); +} + +async function createServer(initialPath: string): Promise<{ server: http.Server; baseUrl: string }> { + const server = http.createServer(async (req, res) => { + const url = new URL(req.url || '/', 'http://localhost'); + const pathname = url.pathname; + + const ctx = { + pathname, + url, + req, + res, + initialPath, + handlePostRequest, + broadcastToClients() {}, + }; + + try { + const handled = await mod.handleSkillsRoutes(ctx); + if (!handled) { + res.writeHead(404, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: 'Not Found' })); + } + } catch (err: any) { + res.writeHead(500, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ error: err?.message || String(err) })); + } + }); + + await new Promise<void>((resolve) => server.listen(0, () => resolve())); + const addr = server.address(); + const port = typeof addr === 'object' && addr ?
addr.port : 0; + return { server, baseUrl: `http://127.0.0.1:${port}` }; +} + +describe('skills routes path validation', async () => { + before(async () => { + mock.method(console, 'log', () => {}); + mock.method(console, 'error', () => {}); + mod = await import(skillsRoutesUrl.href); + }); + + after(() => { + mock.restoreAll(); + rmSync(PROJECT_ROOT, { recursive: true, force: true }); + rmSync(OUTSIDE_ROOT, { recursive: true, force: true }); + }); + + it('GET /api/skills rejects projectPath outside initialPath', async () => { + const { server, baseUrl } = await createServer(PROJECT_ROOT); + try { + const res = await requestJson(baseUrl, 'GET', `/api/skills?path=${encodeURIComponent(OUTSIDE_ROOT)}`); + assert.equal(res.status, 403); + assert.equal(res.json.error, 'Access denied'); + } finally { + await new Promise<void>((resolve) => server.close(() => resolve())); + } + }); + + it('GET /api/skills/:name/dir rejects traversal via subpath', async () => { + const { server, baseUrl } = await createServer(PROJECT_ROOT); + try { + const subpath = encodeURIComponent('../..'); + const pathParam = encodeURIComponent(PROJECT_ROOT); + const res = await requestJson(baseUrl, 'GET', `/api/skills/demo/dir?subpath=${subpath}&path=${pathParam}&location=project`); + assert.equal(res.status, 403); + assert.equal(res.json.error, 'Access denied'); + } finally { + await new Promise<void>((resolve) => server.close(() => resolve())); + } + }); + + it('GET /api/skills/:name rejects traversal via path segment', async () => { + const { server, baseUrl } = await createServer(PROJECT_ROOT); + try { + const res = await requestJson(baseUrl, 'GET', '/api/skills/../../secret?location=project'); + assert.equal(res.status, 403); + assert.equal(res.json.error, 'Access denied'); + } finally { + await new Promise<void>((resolve) => server.close(() => resolve())); + } + }); + + it('GET /api/skills/:name/dir rejects unsafe skill names', async () => { + const { server, baseUrl } = await createServer(PROJECT_ROOT); + try {
+ const pathParam = encodeURIComponent(PROJECT_ROOT); + const res = await requestJson(baseUrl, 'GET', `/api/skills/${encodeURIComponent('bad..name')}/dir?path=${pathParam}&location=project`); + assert.equal(res.status, 400); + assert.ok(String(res.json.error).includes('Invalid skill name')); + } finally { + await new Promise<void>((resolve) => server.close(() => resolve())); + } + }); +}); diff --git a/ccw/tests/token-manager.test.ts b/ccw/tests/token-manager.test.ts new file mode 100644 index 00000000..824e955a --- /dev/null +++ b/ccw/tests/token-manager.test.ts @@ -0,0 +1,178 @@ +/** + * Unit tests for TokenManager authentication helper. + * + * Notes: + * - Targets the runtime implementation shipped in `ccw/dist`. + * - Uses in-memory fs stubs (no real file IO). + */ + +import { after, beforeEach, describe, it } from 'node:test'; +import assert from 'node:assert/strict'; +import path from 'node:path'; +import { createRequire } from 'node:module'; + +const require = createRequire(import.meta.url); +// eslint-disable-next-line @typescript-eslint/no-var-requires +const fs = require('node:fs') as typeof import('node:fs'); + +const ORIGINAL_ENV = { ...process.env }; +const TEST_CCW_HOME = path.join(process.cwd(), '.tmp-ccw-auth-home'); +process.env.CCW_DATA_DIR = TEST_CCW_HOME; + +type FsState = { + existing: Set<string>; + files: Map<string, string>; + mkdirCalls: Array<{ path: string; options: unknown }>; + writeCalls: Array<{ path: string; data: string; options: unknown }>; + chmodCalls: Array<{ path: string; mode: number }>; +}; + +const state: FsState = { + existing: new Set(), + files: new Map(), + mkdirCalls: [], + writeCalls: [], + chmodCalls: [], +}; + +function key(filePath: string): string { + return path.resolve(filePath).replace(/\\/g, '/').toLowerCase(); +} + +function setExists(filePath: string): void { + state.existing.add(key(filePath)); +} + +function setFile(filePath: string, content: string): void { + const normalized = key(filePath); + state.files.set(normalized, content); +
state.existing.add(normalized); +} + +const originalFs = { + existsSync: fs.existsSync, + mkdirSync: fs.mkdirSync, + readFileSync: fs.readFileSync, + writeFileSync: fs.writeFileSync, + chmodSync: fs.chmodSync, +}; + +fs.existsSync = ((filePath: string) => state.existing.has(key(filePath))) as any; +fs.mkdirSync = ((dirPath: string, options: unknown) => { + state.mkdirCalls.push({ path: dirPath, options }); + setExists(dirPath); +}) as any; +fs.readFileSync = ((filePath: string, encoding: string) => { + assert.equal(encoding, 'utf8'); + const content = state.files.get(key(filePath)); + if (content !== undefined) return content; + + // Allow Node/third-party modules (e.g., jsonwebtoken) to load normally. + return originalFs.readFileSync(filePath, encoding); +}) as any; +fs.writeFileSync = ((filePath: string, data: string, options: unknown) => { + state.writeCalls.push({ path: filePath, data: String(data), options }); + setFile(filePath, String(data)); +}) as any; +fs.chmodSync = ((filePath: string, mode: number) => { + state.chmodCalls.push({ path: filePath, mode }); +}) as any; + +const tokenManagerUrl = new URL('../dist/core/auth/token-manager.js', import.meta.url).href; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +let mod: any; + +beforeEach(() => { + state.existing.clear(); + state.files.clear(); + state.mkdirCalls.length = 0; + state.writeCalls.length = 0; + state.chmodCalls.length = 0; +}); + +describe('TokenManager authentication helper', async () => { + mod = await import(tokenManagerUrl); + + it('generateToken produces a valid HS256 JWT with 24h expiry', () => { + const manager = new mod.TokenManager(); + const secret = 's'.repeat(64); + const now = Date.now(); + + const result = manager.generateToken(secret); + assert.ok(result.token.includes('.')); + assert.ok(result.expiresAt instanceof Date); + + const [headerB64] = result.token.split('.'); + const header = JSON.parse(Buffer.from(headerB64, 'base64url').toString('utf8')) as { alg?: 
string }; + assert.equal(header.alg, 'HS256'); + + const msUntilExpiry = result.expiresAt.getTime() - now; + assert.ok(msUntilExpiry > 23 * 60 * 60 * 1000); + assert.ok(msUntilExpiry < 24 * 60 * 60 * 1000 + 60 * 1000); + }); + + it('validateToken accepts correct secret and rejects wrong secret', () => { + const manager = new mod.TokenManager(); + const secret = 'my-secret'; + const { token } = manager.generateToken(secret); + + assert.equal(manager.validateToken(token, secret), true); + assert.equal(manager.validateToken(token, 'wrong-secret'), false); + }); + + it('validateToken rejects expired tokens', () => { + const manager = new mod.TokenManager({ tokenTtlMs: -1000 }); + const secret = 'my-secret'; + const { token } = manager.generateToken(secret); + + assert.equal(manager.validateToken(token, secret), false); + }); + + it('persists and reloads secret key with restrictive permissions', () => { + const authDir = path.join(TEST_CCW_HOME, 'auth'); + const secretPath = path.join(authDir, 'secret.key'); + + const manager1 = new mod.TokenManager({ authDir, secretKeyPath: secretPath }); + const secret1 = manager1.getSecretKey(); + + assert.equal(secret1.length, 64); // 32 bytes hex + assert.equal(state.writeCalls.length, 1); + assert.equal(state.writeCalls[0].path, secretPath); + assert.deepEqual(state.writeCalls[0].options, { encoding: 'utf8', mode: 0o600 }); + assert.deepEqual(state.chmodCalls, [{ path: secretPath, mode: 0o600 }]); + + const manager2 = new mod.TokenManager({ authDir, secretKeyPath: secretPath }); + const secret2 = manager2.getSecretKey(); + assert.equal(secret2, secret1); + }); + + it('rotates token before expiry and persists updated token', () => { + const authDir = path.join(TEST_CCW_HOME, 'auth'); + const tokenPath = path.join(authDir, 'token.jwt'); + + const manager = new mod.TokenManager({ + authDir, + tokenPath, + tokenTtlMs: 1000, + rotateBeforeExpiryMs: 2000, + }); + + const first = manager.getOrCreateAuthToken(); + const tokenFileFirst = 
state.files.get(key(tokenPath)); + assert.equal(tokenFileFirst, first.token); + + const second = manager.getOrCreateAuthToken(); + const tokenFileSecond = state.files.get(key(tokenPath)); + assert.equal(tokenFileSecond, second.token); + assert.notEqual(second.token, first.token); + }); +}); + +after(() => { + fs.existsSync = originalFs.existsSync; + fs.mkdirSync = originalFs.mkdirSync; + fs.readFileSync = originalFs.readFileSync; + fs.writeFileSync = originalFs.writeFileSync; + fs.chmodSync = originalFs.chmodSync; + process.env = ORIGINAL_ENV; +}); diff --git a/codex-lens/tests/test_rrf_fusion.py b/codex-lens/tests/test_rrf_fusion.py index ed515f51..762d4b54 100644 --- a/codex-lens/tests/test_rrf_fusion.py +++ b/codex-lens/tests/test_rrf_fusion.py @@ -315,6 +315,24 @@ class TestNormalizeWeights: assert normalized is not weights assert normalized == weights + def test_normalize_weights_with_zero_total(self): + """Zero total returns unchanged weights without division.""" + weights = {"exact": 0.0, "fuzzy": 0.0} + + normalized = normalize_weights(weights) + + assert normalized is not weights + assert normalized == weights + + def test_normalize_weights_with_negative_total(self): + """Negative total returns unchanged weights without division.""" + weights = {"exact": -1.0, "fuzzy": -0.5} + + normalized = normalize_weights(weights) + + assert normalized is not weights + assert normalized == weights + def test_normalize_weights_valid_total_normalizes(self): """Valid finite positive total performs normalization correctly.""" weights = {"exact": 2.0, "fuzzy": 1.0} diff --git a/package-lock.json b/package-lock.json index 5e3bf7b9..719a0313 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "claude-code-workflow", - "version": "6.3.16", + "version": "6.3.18", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "claude-code-workflow", - "version": "6.3.16", + "version": "6.3.18", "license": "MIT", "dependencies": { 
"@modelcontextprotocol/sdk": "^1.0.4", @@ -18,6 +18,7 @@ "glob": "^10.3.0", "gradient-string": "^2.0.2", "inquirer": "^9.2.0", + "jsonwebtoken": "^9.0.3", "open": "^9.1.0", "ora": "^7.0.0", "zod": "^4.1.13" @@ -31,6 +32,7 @@ "@types/better-sqlite3": "^7.6.12", "@types/gradient-string": "^1.1.6", "@types/inquirer": "^9.0.9", + "@types/jsonwebtoken": "^9.0.10", "@types/node": "^25.0.1", "pixelmatch": "^7.1.0", "playwright": "^1.57.0", @@ -223,6 +225,24 @@ "rxjs": "^7.2.0" } }, + "node_modules/@types/jsonwebtoken": { + "version": "9.0.10", + "resolved": "https://registry.npmmirror.com/@types/jsonwebtoken/-/jsonwebtoken-9.0.10.tgz", + "integrity": "sha512-asx5hIG9Qmf/1oStypjanR7iKTv0gXQ1Ov/jfrX6kS/EO0OFni8orbmGCn0672NHR3kXHwpAwR+B368ZGN/2rA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/ms": "*", + "@types/node": "*" + } + }, + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmmirror.com/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/node": { "version": "25.0.3", "resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.3.tgz", @@ -529,6 +549,12 @@ "ieee754": "^1.2.1" } }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmmirror.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==", + "license": "BSD-3-Clause" + }, "node_modules/bundle-name": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/bundle-name/-/bundle-name-3.0.0.tgz", @@ -906,6 +932,15 @@ "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", "license": "MIT" }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": 
"https://registry.npmmirror.com/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "license": "Apache-2.0", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, "node_modules/ee-first": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", @@ -1886,6 +1921,91 @@ "integrity": "sha512-fQhoXdcvc3V28x7C7BMs4P5+kNlgUURe2jmUT1T//oBRMDrqy1QPelJimwZGo7Hg9VPV3EQV5Bnq4hbFy2vetA==", "license": "BSD-2-Clause" }, + "node_modules/jsonwebtoken": { + "version": "9.0.3", + "resolved": "https://registry.npmmirror.com/jsonwebtoken/-/jsonwebtoken-9.0.3.tgz", + "integrity": "sha512-MT/xP0CrubFRNLNKvxJ2BYfy53Zkm++5bX9dtuPbqAeQpTVe0MQTFhao8+Cp//EmJp244xt6Drw/GVEGCUj40g==", + "license": "MIT", + "dependencies": { + "jws": "^4.0.1", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, + "node_modules/jwa": { + "version": "2.0.1", + "resolved": "https://registry.npmmirror.com/jwa/-/jwa-2.0.1.tgz", + "integrity": "sha512-hRF04fqJIP8Abbkq5NKGN0Bbr3JxlQ+qhZufXVr0DvujKy93ZCbXZMHDL4EOtodSbCWxOqR8MS1tXA5hwqCXDg==", + "license": "MIT", + "dependencies": { + "buffer-equal-constant-time": "^1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jws": { + "version": "4.0.1", + "resolved": "https://registry.npmmirror.com/jws/-/jws-4.0.1.tgz", + "integrity": "sha512-EKI/M/yqPncGUUh44xz0PxSidXFr/+r0pA70+gIYhjv+et7yxM+s29Y+VGDkovRofQem0fs7Uvf4+YmAdyRduA==", + "license": "MIT", + "dependencies": { + "jwa": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": 
"https://registry.npmmirror.com/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==", + "license": "MIT" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmmirror.com/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==", + "license": "MIT" + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmmirror.com/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==", + "license": "MIT" + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmmirror.com/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==", + "license": "MIT" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmmirror.com/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==", + "license": "MIT" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmmirror.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==", + "license": "MIT" + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmmirror.com/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", + "license": "MIT" + }, "node_modules/log-symbols": { "version": 
"5.1.0", "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-5.1.0.tgz", diff --git a/package.json b/package.json index 64f1f492..8f3ce053 100644 --- a/package.json +++ b/package.json @@ -41,6 +41,7 @@ "glob": "^10.3.0", "gradient-string": "^2.0.2", "inquirer": "^9.2.0", + "jsonwebtoken": "^9.0.3", "open": "^9.1.0", "ora": "^7.0.0", "zod": "^4.1.13" @@ -80,6 +81,7 @@ "@types/better-sqlite3": "^7.6.12", "@types/gradient-string": "^1.1.6", "@types/inquirer": "^9.0.9", + "@types/jsonwebtoken": "^9.0.10", "@types/node": "^25.0.1", "pixelmatch": "^7.1.0", "playwright": "^1.57.0",