mirror of
https://github.com/catlog22/Claude-Code-Workflow.git
synced 2026-02-05 01:50:27 +08:00
Merge branch 'main' of https://github.com/catlog22/Claude-Code-Workflow
This commit is contained in:
104
ccw/docs/SECURITY.md
Normal file
104
ccw/docs/SECURITY.md
Normal file
@@ -0,0 +1,104 @@
|
||||
# CCW Dashboard Server Security
|
||||
|
||||
This document describes the CCW dashboard server security model, authentication, and recommended deployment practices.
|
||||
|
||||
## Summary
|
||||
|
||||
- **Authentication**: API endpoints require a JWT token (header or cookie).
|
||||
- **Default binding**: Server binds to `127.0.0.1` by default to avoid network exposure.
|
||||
- **CORS**: Only localhost origins are allowed; wildcard CORS is not used.
|
||||
|
||||
## Authentication Model
|
||||
|
||||
### Token Types
|
||||
|
||||
CCW uses **JWT (HS256)** tokens for API authentication:
|
||||
|
||||
- **Header-based**: `Authorization: Bearer <token>`
|
||||
- **Cookie-based**: `auth_token=<token>` (set automatically for local browser access)
|
||||
|
||||
### Token Generation & Storage
|
||||
|
||||
On server start, CCW generates or reuses:
|
||||
|
||||
- **Secret key** (random 256-bit minimum): stored at `~/.ccw/auth/secret.key` (or under `CCW_DATA_DIR`)
|
||||
- **Current token**: stored at `~/.ccw/auth/token.jwt` (or under `CCW_DATA_DIR`)
|
||||
|
||||
Tokens have a **24-hour expiry**. CCW rotates tokens when re-generated near expiry.
|
||||
|
||||
> **Note**: On Windows, POSIX-style `0600` permissions are best-effort; CCW still writes files with restrictive modes where supported.
|
||||
|
||||
### Retrieving a Token
|
||||
|
||||
To retrieve the current token from the local machine:
|
||||
|
||||
```bash
|
||||
curl -s http://127.0.0.1:3456/api/auth/token
|
||||
```
|
||||
|
||||
This endpoint is **localhost-only** (loopback). It also sets a `HttpOnly` cookie for browser clients.
|
||||
|
||||
### Using a Token
|
||||
|
||||
Example (header-based):
|
||||
|
||||
```bash
|
||||
curl -H "Authorization: Bearer <token>" http://127.0.0.1:3456/api/health
|
||||
```
|
||||
|
||||
Browser clients typically use cookie auth automatically when the dashboard is opened from `http://127.0.0.1:<port>` or `http://localhost:<port>`.
|
||||
|
||||
## Network Binding (Localhost by Default)
|
||||
|
||||
By default, CCW binds to `127.0.0.1`:
|
||||
|
||||
```bash
|
||||
ccw serve --host 127.0.0.1 --port 3456
|
||||
```
|
||||
|
||||
To bind to all interfaces (advanced / higher risk):
|
||||
|
||||
```bash
|
||||
ccw serve --host 0.0.0.0 --port 3456
|
||||
```
|
||||
|
||||
Binding to non-localhost addresses exposes the dashboard API to the network. Only do this if you understand the risk and have controls in place.
|
||||
|
||||
### Recommendations if Using `--host`
|
||||
|
||||
- Use a host firewall to restrict inbound access to trusted IPs.
|
||||
- Prefer VPN access over opening ports publicly.
|
||||
- Treat the JWT token as a password; never share it.
|
||||
|
||||
## CORS Policy
|
||||
|
||||
CCW no longer uses `Access-Control-Allow-Origin: *`.
|
||||
|
||||
- Allowed origins are restricted to:
|
||||
- `http://localhost:<port>`
|
||||
- `http://127.0.0.1:<port>`
|
||||
- `Access-Control-Allow-Credentials: true` is set to support cookie auth.
|
||||
|
||||
## Threat Model (What This Protects)
|
||||
|
||||
Designed to mitigate:
|
||||
|
||||
- Accidental exposure of dashboard APIs on a LAN/Wi‑Fi network.
|
||||
- Cross-origin attacks from untrusted web pages attempting to call local APIs.
|
||||
|
||||
Not designed to protect against:
|
||||
|
||||
- A fully compromised local machine/user account.
|
||||
- Deliberately exposing the server to the internet without additional perimeter security.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### `401 Unauthorized`
|
||||
|
||||
- Visit the dashboard page again (cookie is re-issued for localhost access), or
|
||||
- Call `GET /api/auth/token` and use the returned token in the `Authorization` header.
|
||||
|
||||
### Token Expired
|
||||
|
||||
- Call `GET /api/auth/token` to refresh/rotate the token.
|
||||
|
||||
@@ -83,6 +83,7 @@ export function run(argv: string[]): void {
|
||||
.description('Open workflow dashboard server with live path switching')
|
||||
.option('-p, --path <path>', 'Path to project directory', '.')
|
||||
.option('--port <port>', 'Server port', '3456')
|
||||
.option('--host <host>', 'Server host to bind', '127.0.0.1')
|
||||
.option('--no-browser', 'Start server without opening browser')
|
||||
.action(viewCommand);
|
||||
|
||||
@@ -92,6 +93,7 @@ export function run(argv: string[]): void {
|
||||
.description('Alias for view command')
|
||||
.option('-p, --path <path>', 'Initial project directory')
|
||||
.option('--port <port>', 'Server port', '3456')
|
||||
.option('--host <host>', 'Server host to bind', '127.0.0.1')
|
||||
.option('--no-browser', 'Start server without opening browser')
|
||||
.action(serveCommand);
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
|
||||
import chalk from 'chalk';
|
||||
import http from 'http';
|
||||
import inquirer from 'inquirer';
|
||||
import {
|
||||
cliExecutorTool,
|
||||
getCliToolsStatus,
|
||||
@@ -26,6 +27,7 @@ import {
|
||||
getStorageLocationInstructions
|
||||
} from '../tools/storage-manager.js';
|
||||
import { getHistoryStore } from '../tools/cli-history-store.js';
|
||||
import { createSpinner } from '../utils/ui.js';
|
||||
|
||||
// Dashboard notification settings
|
||||
const DASHBOARD_PORT = process.env.CCW_PORT || 3456;
|
||||
@@ -280,12 +282,17 @@ async function cleanStorage(options: StorageOptions): Promise<void> {
|
||||
}
|
||||
|
||||
if (!force) {
|
||||
console.log(chalk.bold.yellow('\n Warning: This will delete ALL CCW storage:'));
|
||||
console.log(` Location: ${stats.rootPath}`);
|
||||
console.log(` Projects: ${stats.projectCount}`);
|
||||
console.log(` Size: ${formatBytes(stats.totalSize)}`);
|
||||
console.log(chalk.gray('\n Use --force to confirm deletion.\n'));
|
||||
return;
|
||||
const { proceed } = await inquirer.prompt([{
|
||||
type: 'confirm',
|
||||
name: 'proceed',
|
||||
message: `Delete ALL CCW storage? This will remove ${stats.projectCount} projects (${formatBytes(stats.totalSize)}). This action cannot be undone.`,
|
||||
default: false
|
||||
}]);
|
||||
|
||||
if (!proceed) {
|
||||
console.log(chalk.yellow('\n Storage clean cancelled.\n'));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(chalk.bold.cyan('\n Cleaning all storage...\n'));
|
||||
@@ -554,6 +561,11 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec
|
||||
} else if (optionPrompt) {
|
||||
// Use --prompt/-p option (preferred for multi-line)
|
||||
finalPrompt = optionPrompt;
|
||||
const promptLineCount = optionPrompt.split(/\r?\n/).length;
|
||||
if (promptLineCount > 3) {
|
||||
console.log(chalk.dim(' 💡 Tip: Use --file option to avoid shell escaping issues with multi-line prompts'));
|
||||
console.log(chalk.dim(' Example: ccw cli -f prompt.txt --tool gemini'));
|
||||
}
|
||||
} else {
|
||||
// Fall back to positional argument
|
||||
finalPrompt = positionalPrompt;
|
||||
@@ -705,7 +717,6 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec
|
||||
}
|
||||
const nativeMode = noNative ? ' (prompt-concat)' : '';
|
||||
const idInfo = id ? ` [${id}]` : '';
|
||||
console.log(chalk.cyan(`\n Executing ${tool} (${mode} mode${resumeInfo}${nativeMode})${idInfo}...\n`));
|
||||
|
||||
// Show merge details
|
||||
if (isMerge) {
|
||||
@@ -719,11 +730,31 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec
|
||||
// Generate execution ID for streaming (use custom ID or timestamp-based)
|
||||
const executionId = id || `${Date.now()}-${tool}`;
|
||||
const startTime = Date.now();
|
||||
const spinnerBaseText = `Executing ${tool} (${mode} mode${resumeInfo}${nativeMode})${idInfo}...`;
|
||||
console.log();
|
||||
|
||||
const spinner = stream ? null : createSpinner(` ${spinnerBaseText}`).start();
|
||||
const elapsedInterval = spinner
|
||||
? setInterval(() => {
|
||||
const elapsedSeconds = Math.floor((Date.now() - startTime) / 1000);
|
||||
spinner.text = ` ${spinnerBaseText} (${elapsedSeconds}s elapsed)`;
|
||||
}, 1000)
|
||||
: null;
|
||||
elapsedInterval?.unref?.();
|
||||
|
||||
if (!spinner) {
|
||||
console.log(chalk.cyan(` ${spinnerBaseText}\n`));
|
||||
}
|
||||
|
||||
// Handle process interruption (SIGINT/SIGTERM) to notify dashboard
|
||||
const handleInterrupt = (signal: string) => {
|
||||
const duration = Date.now() - startTime;
|
||||
console.log(chalk.yellow(`\n Interrupted by ${signal}`));
|
||||
if (elapsedInterval) clearInterval(elapsedInterval);
|
||||
if (spinner) {
|
||||
spinner.warn(`Interrupted by ${signal} (${Math.floor(duration / 1000)}s elapsed)`);
|
||||
} else {
|
||||
console.log(chalk.yellow(`\n Interrupted by ${signal}`));
|
||||
}
|
||||
|
||||
// Kill child process (gemini/codex/qwen CLI) if running
|
||||
killCurrentCliProcess();
|
||||
@@ -790,6 +821,19 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec
|
||||
stream: !!stream // stream=true → streaming enabled (no cache), stream=false → cache output (default)
|
||||
}, onOutput); // Always pass onOutput for real-time dashboard streaming
|
||||
|
||||
if (elapsedInterval) clearInterval(elapsedInterval);
|
||||
if (spinner) {
|
||||
const durationSeconds = (result.execution.duration_ms / 1000).toFixed(1);
|
||||
const turnInfo = result.success && result.conversation.turn_count > 1
|
||||
? ` (turn ${result.conversation.turn_count})`
|
||||
: '';
|
||||
if (result.success) {
|
||||
spinner.succeed(`Completed in ${durationSeconds}s${turnInfo}`);
|
||||
} else {
|
||||
spinner.fail(`Failed after ${durationSeconds}s`);
|
||||
}
|
||||
}
|
||||
|
||||
// If not streaming (default), print output now
|
||||
// Prefer parsedOutput (from stream parser) over raw stdout for better formatting
|
||||
if (!stream) {
|
||||
@@ -802,10 +846,12 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec
|
||||
// Print summary with execution ID and turn info
|
||||
console.log();
|
||||
if (result.success) {
|
||||
const turnInfo = result.conversation.turn_count > 1
|
||||
? ` (turn ${result.conversation.turn_count})`
|
||||
: '';
|
||||
console.log(chalk.green(` ✓ Completed in ${(result.execution.duration_ms / 1000).toFixed(1)}s${turnInfo}`));
|
||||
if (!spinner) {
|
||||
const turnInfo = result.conversation.turn_count > 1
|
||||
? ` (turn ${result.conversation.turn_count})`
|
||||
: '';
|
||||
console.log(chalk.green(` ✓ Completed in ${(result.execution.duration_ms / 1000).toFixed(1)}s${turnInfo}`));
|
||||
}
|
||||
console.log(chalk.gray(` ID: ${result.execution.id}`));
|
||||
if (isMerge && !id) {
|
||||
// Merge without custom ID: updated all source conversations
|
||||
@@ -844,7 +890,9 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec
|
||||
// Delay to allow HTTP request to complete
|
||||
setTimeout(() => process.exit(0), 150);
|
||||
} else {
|
||||
console.log(chalk.red(` ✗ Failed (${result.execution.status})`));
|
||||
if (!spinner) {
|
||||
console.log(chalk.red(` ✗ Failed (${result.execution.status})`));
|
||||
}
|
||||
console.log(chalk.gray(` ID: ${result.execution.id}`));
|
||||
console.log(chalk.gray(` Duration: ${(result.execution.duration_ms / 1000).toFixed(1)}s`));
|
||||
console.log(chalk.gray(` Exit Code: ${result.execution.exit_code}`));
|
||||
@@ -861,6 +909,8 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec
|
||||
}
|
||||
if (stderrLines.length > 30) {
|
||||
console.log(chalk.yellow(` ... ${stderrLines.length - 30} more lines`));
|
||||
console.log(chalk.cyan(` 💡 View full output: ccw cli output ${result.execution.id}`));
|
||||
console.log();
|
||||
}
|
||||
console.log(chalk.gray(' ' + '─'.repeat(60)));
|
||||
}
|
||||
@@ -870,7 +920,6 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec
|
||||
console.log(chalk.yellow.bold(' Troubleshooting:'));
|
||||
console.log(chalk.gray(` • Check if ${tool} is properly installed: ccw cli status`));
|
||||
console.log(chalk.gray(` • Enable debug mode: DEBUG=true ccw cli -p "..." or set DEBUG=true && ccw cli -p "..."`));
|
||||
console.log(chalk.gray(` • View full output: ccw cli output ${result.execution.id}`));
|
||||
if (result.stderr?.includes('API key') || result.stderr?.includes('Authentication')) {
|
||||
console.log(chalk.gray(` • Check API key configuration for ${tool}`));
|
||||
}
|
||||
@@ -901,6 +950,8 @@ async function execAction(positionalPrompt: string | undefined, options: CliExec
|
||||
}
|
||||
} catch (error) {
|
||||
const err = error as Error;
|
||||
if (elapsedInterval) clearInterval(elapsedInterval);
|
||||
if (spinner) spinner.fail('Execution error');
|
||||
console.error(chalk.red.bold(`\n ✗ Execution Error\n`));
|
||||
console.error(chalk.red(` ${err.message}`));
|
||||
|
||||
@@ -1121,8 +1172,8 @@ export async function cliCommand(
|
||||
console.log(chalk.bold.cyan('\n CCW CLI Tool Executor\n'));
|
||||
console.log(' Unified interface for Gemini, Qwen, and Codex CLI tools.\n');
|
||||
console.log(' Usage:');
|
||||
console.log(chalk.gray(' ccw cli -p "<prompt>" --tool <tool> Execute with prompt'));
|
||||
console.log(chalk.gray(' ccw cli -f prompt.txt --tool <tool> Execute from file'));
|
||||
console.log(chalk.gray(' ccw cli -f prompt.txt --tool <tool> Execute from file (recommended for multi-line)'));
|
||||
console.log(chalk.gray(' ccw cli -p "<prompt>" --tool <tool> Execute with prompt (single-line)'));
|
||||
console.log();
|
||||
console.log(' Subcommands:');
|
||||
console.log(chalk.gray(' status Check CLI tools availability'));
|
||||
@@ -1133,8 +1184,8 @@ export async function cliCommand(
|
||||
console.log(chalk.gray(' test-parse [args] Debug CLI argument parsing'));
|
||||
console.log();
|
||||
console.log(' Options:');
|
||||
console.log(chalk.gray(' -p, --prompt <text> Prompt text'));
|
||||
console.log(chalk.gray(' -f, --file <file> Read prompt from file'));
|
||||
console.log(chalk.gray(' -f, --file <file> Read prompt from file (recommended for multi-line prompts)'));
|
||||
console.log(chalk.gray(' -p, --prompt <text> Prompt text (single-line)'));
|
||||
console.log(chalk.gray(' --tool <tool> Tool: gemini, qwen, codex (default: gemini)'));
|
||||
console.log(chalk.gray(' --mode <mode> Mode: analysis, write, auto (default: analysis)'));
|
||||
console.log(chalk.gray(' -d, --debug Enable debug logging for troubleshooting'));
|
||||
@@ -1146,6 +1197,27 @@ export async function cliCommand(
|
||||
console.log(chalk.gray(' --cache <items> Cache: comma-separated @patterns and text'));
|
||||
console.log(chalk.gray(' --inject-mode <m> Inject mode: none, full, progressive'));
|
||||
console.log();
|
||||
console.log(' Examples:');
|
||||
console.log(chalk.gray(' ccw cli -f my-prompt.txt --tool gemini'));
|
||||
console.log();
|
||||
console.log(chalk.gray(' # Bash/Linux heredoc'));
|
||||
console.log(chalk.gray(" ccw cli -f <(cat <<'EOF'"));
|
||||
console.log(chalk.gray(' PURPOSE: Multi-line prompt'));
|
||||
console.log(chalk.gray(' TASK: Example task'));
|
||||
console.log(chalk.gray(' EOF'));
|
||||
console.log(chalk.gray(' ) --tool gemini'));
|
||||
console.log();
|
||||
console.log(chalk.gray(' # PowerShell multi-line'));
|
||||
console.log(chalk.gray(" @'"));
|
||||
console.log(chalk.gray(' PURPOSE: Multi-line prompt'));
|
||||
console.log(chalk.gray(' TASK: Example task'));
|
||||
console.log(chalk.gray(" '@ | Out-File -Encoding utf8 prompt.tmp; ccw cli -f prompt.tmp --tool gemini"));
|
||||
console.log();
|
||||
console.log(chalk.gray(' ccw cli --resume --tool gemini'));
|
||||
console.log(chalk.gray(' ccw cli -p "..." --cache "@src/**/*.ts" --tool codex'));
|
||||
console.log(chalk.gray(' ccw cli -p "..." --cache "@src/**/*" --inject-mode progressive --tool gemini'));
|
||||
console.log(chalk.gray(' ccw cli output <id> --final # View result with usage hint'));
|
||||
console.log();
|
||||
console.log(' Cache format:');
|
||||
console.log(chalk.gray(' --cache "@src/**/*.ts,@CLAUDE.md" # @patterns to pack'));
|
||||
console.log(chalk.gray(' --cache "@src/**/*,extra context" # patterns + text content'));
|
||||
@@ -1162,14 +1234,7 @@ export async function cliCommand(
|
||||
console.log(chalk.gray(' --offset <n> Start from byte offset'));
|
||||
console.log(chalk.gray(' --limit <n> Limit output bytes'));
|
||||
console.log();
|
||||
console.log(' Examples:');
|
||||
console.log(chalk.gray(' ccw cli -p "Analyze auth module" --tool gemini'));
|
||||
console.log(chalk.gray(' ccw cli -f prompt.txt --tool codex --mode write'));
|
||||
console.log(chalk.gray(' ccw cli -p "$(cat template.md)" --tool gemini'));
|
||||
console.log(chalk.gray(' ccw cli --resume --tool gemini'));
|
||||
console.log(chalk.gray(' ccw cli -p "..." --cache "@src/**/*.ts" --tool codex'));
|
||||
console.log(chalk.gray(' ccw cli -p "..." --cache "@src/**/*" --inject-mode progressive --tool gemini'));
|
||||
console.log(chalk.gray(' ccw cli output <id> --final # View result with usage hint'));
|
||||
console.log(chalk.dim(' Tip: For complex prompts, use --file to avoid shell escaping issues'));
|
||||
console.log();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,8 +6,18 @@
|
||||
|
||||
import chalk from 'chalk';
|
||||
import { execSync } from 'child_process';
|
||||
import inquirer from 'inquirer';
|
||||
import { existsSync, mkdirSync, readFileSync, writeFileSync, unlinkSync, statSync } from 'fs';
|
||||
import { join, resolve } from 'path';
|
||||
import { EXEC_TIMEOUTS } from '../utils/exec-constants.js';
|
||||
|
||||
function isExecTimeoutError(error: unknown): boolean {
|
||||
const err = error as { code?: unknown; errno?: unknown; message?: unknown } | null;
|
||||
const code = err?.code ?? err?.errno;
|
||||
if (code === 'ETIMEDOUT') return true;
|
||||
const message = typeof err?.message === 'string' ? err.message : '';
|
||||
return message.includes('ETIMEDOUT');
|
||||
}
|
||||
|
||||
// Handle EPIPE errors gracefully
|
||||
process.stdout.on('error', (err: NodeJS.ErrnoException) => {
|
||||
@@ -262,13 +272,15 @@ function getProjectRoot(): string {
|
||||
// Get the common git directory (points to main repo's .git)
|
||||
const gitCommonDir = execSync('git rev-parse --git-common-dir', {
|
||||
encoding: 'utf-8',
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
timeout: EXEC_TIMEOUTS.GIT_QUICK,
|
||||
}).trim();
|
||||
|
||||
// Get the current git directory
|
||||
const gitDir = execSync('git rev-parse --git-dir', {
|
||||
encoding: 'utf-8',
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
timeout: EXEC_TIMEOUTS.GIT_QUICK,
|
||||
}).trim();
|
||||
|
||||
// Normalize paths for comparison (Windows case insensitive)
|
||||
@@ -287,7 +299,10 @@ function getProjectRoot(): string {
|
||||
return mainRepoRoot;
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
} catch (err: unknown) {
|
||||
if (isExecTimeoutError(err)) {
|
||||
console.warn(`[issue] git rev-parse timed out after ${EXEC_TIMEOUTS.GIT_QUICK}ms; falling back to filesystem detection`);
|
||||
}
|
||||
// Git command failed - fall through to manual detection
|
||||
}
|
||||
|
||||
@@ -334,7 +349,7 @@ function ensureIssuesDir(): void {
|
||||
|
||||
// ============ Issues JSONL ============
|
||||
|
||||
function readIssues(): Issue[] {
|
||||
export function readIssues(): Issue[] {
|
||||
const path = join(getIssuesDir(), 'issues.jsonl');
|
||||
if (!existsSync(path)) return [];
|
||||
try {
|
||||
@@ -347,7 +362,7 @@ function readIssues(): Issue[] {
|
||||
}
|
||||
}
|
||||
|
||||
function writeIssues(issues: Issue[]): void {
|
||||
export function writeIssues(issues: Issue[]): void {
|
||||
ensureIssuesDir();
|
||||
const path = join(getIssuesDir(), 'issues.jsonl');
|
||||
// Always add trailing newline for proper JSONL format
|
||||
@@ -482,7 +497,7 @@ function getSolutionsPath(issueId: string): string {
|
||||
return join(getIssuesDir(), 'solutions', `${issueId}.jsonl`);
|
||||
}
|
||||
|
||||
function readSolutions(issueId: string): Solution[] {
|
||||
export function readSolutions(issueId: string): Solution[] {
|
||||
const path = getSolutionsPath(issueId);
|
||||
if (!existsSync(path)) return [];
|
||||
try {
|
||||
@@ -495,7 +510,7 @@ function readSolutions(issueId: string): Solution[] {
|
||||
}
|
||||
}
|
||||
|
||||
function writeSolutions(issueId: string, solutions: Solution[]): void {
|
||||
export function writeSolutions(issueId: string, solutions: Solution[]): void {
|
||||
const dir = join(getIssuesDir(), 'solutions');
|
||||
if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
|
||||
// Always add trailing newline for proper JSONL format
|
||||
@@ -596,7 +611,7 @@ function generateQueueFileId(): string {
|
||||
return `QUE-${ts}`;
|
||||
}
|
||||
|
||||
function readQueue(queueId?: string): Queue | null {
|
||||
export function readQueue(queueId?: string): Queue | null {
|
||||
const index = readQueueIndex();
|
||||
const targetId = queueId || index.active_queue_id;
|
||||
|
||||
@@ -748,7 +763,7 @@ function parseFailureReason(reason: string): FailureDetail {
|
||||
};
|
||||
}
|
||||
|
||||
function writeQueue(queue: Queue): void {
|
||||
export function writeQueue(queue: Queue): void {
|
||||
ensureQueuesDir();
|
||||
|
||||
// Support both old (tasks) and new (solutions) queue format
|
||||
@@ -1841,6 +1856,20 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
if (!options.force) {
|
||||
const { proceed } = await inquirer.prompt([{
|
||||
type: 'confirm',
|
||||
name: 'proceed',
|
||||
message: `Delete queue ${queueId}? This action cannot be undone.`,
|
||||
default: false
|
||||
}]);
|
||||
|
||||
if (!proceed) {
|
||||
console.log(chalk.yellow('Queue deletion cancelled'));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Remove from index
|
||||
const index = readQueueIndex();
|
||||
index.queues = index.queues.filter(q => q.id !== queueId);
|
||||
|
||||
@@ -7,6 +7,7 @@ import type { Server } from 'http';
|
||||
interface ServeOptions {
|
||||
port?: number;
|
||||
path?: string;
|
||||
host?: string;
|
||||
browser?: boolean;
|
||||
}
|
||||
|
||||
@@ -16,6 +17,7 @@ interface ServeOptions {
|
||||
*/
|
||||
export async function serveCommand(options: ServeOptions): Promise<void> {
|
||||
const port = options.port || 3456;
|
||||
const host = options.host || '127.0.0.1';
|
||||
|
||||
// Validate project path
|
||||
let initialPath = process.cwd();
|
||||
@@ -30,26 +32,34 @@ export async function serveCommand(options: ServeOptions): Promise<void> {
|
||||
|
||||
console.log(chalk.blue.bold('\n CCW Dashboard Server\n'));
|
||||
console.log(chalk.gray(` Initial project: ${initialPath}`));
|
||||
console.log(chalk.gray(` Host: ${host}`));
|
||||
console.log(chalk.gray(` Port: ${port}\n`));
|
||||
|
||||
try {
|
||||
// Start server
|
||||
console.log(chalk.cyan(' Starting server...'));
|
||||
const server = await startServer({ port, initialPath });
|
||||
const server = await startServer({ port, host, initialPath });
|
||||
|
||||
const url = `http://localhost:${port}`;
|
||||
console.log(chalk.green(` Server running at ${url}`));
|
||||
const boundUrl = `http://${host}:${port}`;
|
||||
const browserUrl = host === '0.0.0.0' || host === '::' ? `http://localhost:${port}` : boundUrl;
|
||||
|
||||
if (!['127.0.0.1', 'localhost', '::1'].includes(host)) {
|
||||
console.log(chalk.yellow(`\n WARNING: Binding to ${host} exposes the server to network attacks.`));
|
||||
console.log(chalk.yellow(' Ensure firewall is configured and never expose tokens publicly.\n'));
|
||||
}
|
||||
|
||||
console.log(chalk.green(` Server running at ${boundUrl}`));
|
||||
|
||||
// Open browser
|
||||
if (options.browser !== false) {
|
||||
console.log(chalk.cyan(' Opening in browser...'));
|
||||
try {
|
||||
await launchBrowser(url);
|
||||
await launchBrowser(browserUrl);
|
||||
console.log(chalk.green.bold('\n Dashboard opened in browser!'));
|
||||
} catch (err) {
|
||||
const error = err as Error;
|
||||
console.log(chalk.yellow(`\n Could not open browser: ${error.message}`));
|
||||
console.log(chalk.gray(` Open manually: ${url}`));
|
||||
console.log(chalk.gray(` Open manually: ${browserUrl}`));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -59,20 +59,47 @@ export async function stopCommand(options: StopOptions): Promise<void> {
|
||||
signal: AbortSignal.timeout(2000)
|
||||
}).catch(() => null);
|
||||
|
||||
if (healthCheck && healthCheck.ok) {
|
||||
// CCW server is running - send shutdown signal
|
||||
if (healthCheck) {
|
||||
// CCW server is running (may require authentication) - send shutdown signal
|
||||
console.log(chalk.cyan(' CCW server found, sending shutdown signal...'));
|
||||
|
||||
await fetch(`http://localhost:${port}/api/shutdown`, {
|
||||
let token: string | undefined;
|
||||
try {
|
||||
const tokenResponse = await fetch(`http://localhost:${port}/api/auth/token`, {
|
||||
signal: AbortSignal.timeout(2000)
|
||||
});
|
||||
const tokenData = await tokenResponse.json() as { token?: string };
|
||||
token = tokenData.token;
|
||||
} catch {
|
||||
// Ignore token acquisition errors; shutdown request will fail with 401.
|
||||
}
|
||||
|
||||
const shutdownResponse = await fetch(`http://localhost:${port}/api/shutdown`, {
|
||||
method: 'POST',
|
||||
headers: token ? { Authorization: `Bearer ${token}` } : undefined,
|
||||
signal: AbortSignal.timeout(5000)
|
||||
}).catch(() => null);
|
||||
|
||||
// Wait a moment for shutdown
|
||||
await new Promise(resolve => setTimeout(resolve, 500));
|
||||
|
||||
console.log(chalk.green.bold('\n Server stopped successfully!\n'));
|
||||
process.exit(0);
|
||||
if (shutdownResponse && 'ok' in shutdownResponse && shutdownResponse.ok) {
|
||||
console.log(chalk.green.bold('\n Server stopped successfully!\n'));
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
// Best-effort verify shutdown (may still succeed even if shutdown endpoint didn't return ok)
|
||||
const postCheck = await fetch(`http://localhost:${port}/api/health`, {
|
||||
signal: AbortSignal.timeout(2000)
|
||||
}).catch(() => null);
|
||||
|
||||
if (!postCheck) {
|
||||
console.log(chalk.green.bold('\n Server stopped successfully!\n'));
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const statusHint = shutdownResponse ? `HTTP ${shutdownResponse.status}` : 'no response';
|
||||
console.log(chalk.yellow(` Shutdown request did not stop server (${statusHint}).`));
|
||||
}
|
||||
|
||||
// No CCW server responding, check if port is in use
|
||||
|
||||
@@ -6,6 +6,7 @@ import chalk from 'chalk';
|
||||
interface ViewOptions {
|
||||
port?: number;
|
||||
path?: string;
|
||||
host?: string;
|
||||
browser?: boolean;
|
||||
}
|
||||
|
||||
@@ -30,7 +31,8 @@ async function isServerRunning(port: number): Promise<boolean> {
|
||||
});
|
||||
clearTimeout(timeoutId);
|
||||
|
||||
return response.ok;
|
||||
// Authenticated APIs may return 401; any HTTP response means server is running.
|
||||
return response.status > 0;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
@@ -44,8 +46,13 @@ async function isServerRunning(port: number): Promise<boolean> {
|
||||
*/
|
||||
async function switchWorkspace(port: number, path: string): Promise<SwitchWorkspaceResult> {
|
||||
try {
|
||||
const tokenResponse = await fetch(`http://localhost:${port}/api/auth/token`);
|
||||
const tokenData = await tokenResponse.json() as { token?: string };
|
||||
const token = tokenData.token;
|
||||
|
||||
const response = await fetch(
|
||||
`http://localhost:${port}/api/switch-path?path=${encodeURIComponent(path)}`
|
||||
`http://localhost:${port}/api/switch-path?path=${encodeURIComponent(path)}`,
|
||||
token ? { headers: { Authorization: `Bearer ${token}` } } : undefined
|
||||
);
|
||||
return await response.json() as SwitchWorkspaceResult;
|
||||
} catch (err) {
|
||||
@@ -62,6 +69,8 @@ async function switchWorkspace(port: number, path: string): Promise<SwitchWorksp
|
||||
*/
|
||||
export async function viewCommand(options: ViewOptions): Promise<void> {
|
||||
const port = options.port || 3456;
|
||||
const host = options.host || '127.0.0.1';
|
||||
const browserHost = host === '0.0.0.0' || host === '::' ? 'localhost' : host;
|
||||
|
||||
// Resolve workspace path
|
||||
let workspacePath = process.cwd();
|
||||
@@ -89,7 +98,7 @@ export async function viewCommand(options: ViewOptions): Promise<void> {
|
||||
console.log(chalk.green(` Workspace switched successfully`));
|
||||
|
||||
// Open browser with the new path
|
||||
const url = `http://localhost:${port}/?path=${encodeURIComponent(result.path!)}`;
|
||||
const url = `http://${browserHost}:${port}/?path=${encodeURIComponent(result.path!)}`;
|
||||
|
||||
if (options.browser !== false) {
|
||||
console.log(chalk.cyan(' Opening in browser...'));
|
||||
@@ -113,6 +122,7 @@ export async function viewCommand(options: ViewOptions): Promise<void> {
|
||||
await serveCommand({
|
||||
path: workspacePath,
|
||||
port: port,
|
||||
host,
|
||||
browser: options.browser
|
||||
});
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
* Manages provider credentials, custom endpoints, and cache settings
|
||||
*/
|
||||
|
||||
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
|
||||
import { existsSync, mkdirSync, readFileSync, writeFileSync, chmodSync } from 'fs';
|
||||
import { homedir } from 'os';
|
||||
import { join } from 'path';
|
||||
import { StoragePaths, GlobalPaths, ensureStorageDir } from './storage-paths.js';
|
||||
@@ -44,6 +44,14 @@ function getConfigPath(_baseDir?: string): string {
|
||||
return join(configDir, 'litellm-api-config.json');
|
||||
}
|
||||
|
||||
function bestEffortRestrictPermissions(filePath: string, mode: number): void {
|
||||
try {
|
||||
chmodSync(filePath, mode);
|
||||
} catch {
|
||||
// Ignore permission errors (e.g., Windows or restrictive environments)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load configuration from file
|
||||
*/
|
||||
@@ -68,7 +76,8 @@ export function loadLiteLLMApiConfig(baseDir: string): LiteLLMApiConfig {
|
||||
*/
|
||||
function saveConfig(baseDir: string, config: LiteLLMApiConfig): void {
|
||||
const configPath = getConfigPath(baseDir);
|
||||
writeFileSync(configPath, JSON.stringify(config, null, 2), 'utf-8');
|
||||
writeFileSync(configPath, JSON.stringify(config, null, 2), { encoding: 'utf8', mode: 0o600 });
|
||||
bestEffortRestrictPermissions(configPath, 0o600);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -117,10 +126,26 @@ export function getProviderWithResolvedEnvVars(
|
||||
const provider = getProvider(baseDir, providerId);
|
||||
if (!provider) return null;
|
||||
|
||||
return {
|
||||
const resolvedApiKey = resolveEnvVar(provider.apiKey);
|
||||
|
||||
// Avoid leaking env-var syntax or secrets if this object is logged/serialized.
|
||||
const sanitizedProvider: ProviderCredential = {
|
||||
...provider,
|
||||
resolvedApiKey: resolveEnvVar(provider.apiKey),
|
||||
apiKey: '***',
|
||||
apiKeys: provider.apiKeys?.map(keyEntry => ({
|
||||
...keyEntry,
|
||||
key: '***',
|
||||
})),
|
||||
};
|
||||
|
||||
Object.defineProperty(sanitizedProvider, 'resolvedApiKey', {
|
||||
value: resolvedApiKey,
|
||||
enumerable: false,
|
||||
writable: false,
|
||||
configurable: false,
|
||||
});
|
||||
|
||||
return sanitizedProvider as ProviderCredential & { resolvedApiKey: string };
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
104
ccw/src/core/auth/csrf-manager.ts
Normal file
104
ccw/src/core/auth/csrf-manager.ts
Normal file
@@ -0,0 +1,104 @@
|
||||
import { randomBytes } from 'crypto';
|
||||
|
||||
/** Tunables for CsrfTokenManager; defaults are applied when omitted. */
export interface CsrfTokenManagerOptions {
  /** How long an issued CSRF token stays valid, in milliseconds. */
  tokenTtlMs?: number;
  /** Interval between background sweeps of expired/used tokens; values <= 0 disable the sweep. */
  cleanupIntervalMs?: number;
}

/** Internal bookkeeping for a single issued CSRF token. */
type CsrfTokenRecord = {
  // Session the token was bound to at issue time; must match on validation.
  sessionId: string;
  // Absolute expiry timestamp (epoch milliseconds).
  expiresAtMs: number;
  // Tokens are single-use: set once validated successfully.
  used: boolean;
};

const DEFAULT_TOKEN_TTL_MS = 15 * 60 * 1000; // 15 minutes
const DEFAULT_CLEANUP_INTERVAL_MS = 5 * 60 * 1000; // 5 minutes
|
||||
|
||||
export class CsrfTokenManager {
|
||||
private readonly tokenTtlMs: number;
|
||||
private readonly records = new Map<string, CsrfTokenRecord>();
|
||||
private readonly cleanupTimer: NodeJS.Timeout | null;
|
||||
|
||||
constructor(options: CsrfTokenManagerOptions = {}) {
|
||||
this.tokenTtlMs = options.tokenTtlMs ?? DEFAULT_TOKEN_TTL_MS;
|
||||
|
||||
const cleanupIntervalMs = options.cleanupIntervalMs ?? DEFAULT_CLEANUP_INTERVAL_MS;
|
||||
if (cleanupIntervalMs > 0) {
|
||||
this.cleanupTimer = setInterval(() => {
|
||||
this.cleanupExpiredTokens();
|
||||
}, cleanupIntervalMs);
|
||||
|
||||
if (this.cleanupTimer.unref) {
|
||||
this.cleanupTimer.unref();
|
||||
}
|
||||
} else {
|
||||
this.cleanupTimer = null;
|
||||
}
|
||||
}
|
||||
|
||||
dispose(): void {
|
||||
if (this.cleanupTimer) {
|
||||
clearInterval(this.cleanupTimer);
|
||||
}
|
||||
this.records.clear();
|
||||
}
|
||||
|
||||
generateToken(sessionId: string): string {
|
||||
const token = randomBytes(32).toString('hex');
|
||||
this.records.set(token, {
|
||||
sessionId,
|
||||
expiresAtMs: Date.now() + this.tokenTtlMs,
|
||||
used: false,
|
||||
});
|
||||
return token;
|
||||
}
|
||||
|
||||
validateToken(token: string, sessionId: string): boolean {
|
||||
const record = this.records.get(token);
|
||||
if (!record) return false;
|
||||
if (record.used) return false;
|
||||
if (record.sessionId !== sessionId) return false;
|
||||
|
||||
if (Date.now() > record.expiresAtMs) {
|
||||
this.records.delete(token);
|
||||
return false;
|
||||
}
|
||||
|
||||
record.used = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
cleanupExpiredTokens(nowMs: number = Date.now()): number {
|
||||
let removed = 0;
|
||||
|
||||
for (const [token, record] of this.records.entries()) {
|
||||
if (record.used || nowMs > record.expiresAtMs) {
|
||||
this.records.delete(token);
|
||||
removed += 1;
|
||||
}
|
||||
}
|
||||
|
||||
return removed;
|
||||
}
|
||||
|
||||
getActiveTokenCount(): number {
|
||||
return this.records.size;
|
||||
}
|
||||
}
|
||||
|
||||
let csrfManagerInstance: CsrfTokenManager | null = null;
|
||||
|
||||
export function getCsrfTokenManager(options?: CsrfTokenManagerOptions): CsrfTokenManager {
|
||||
if (!csrfManagerInstance) {
|
||||
csrfManagerInstance = new CsrfTokenManager(options);
|
||||
}
|
||||
return csrfManagerInstance;
|
||||
}
|
||||
|
||||
export function resetCsrfTokenManager(): void {
|
||||
if (csrfManagerInstance) {
|
||||
csrfManagerInstance.dispose();
|
||||
}
|
||||
csrfManagerInstance = null;
|
||||
}
|
||||
|
||||
158
ccw/src/core/auth/csrf-middleware.ts
Normal file
158
ccw/src/core/auth/csrf-middleware.ts
Normal file
@@ -0,0 +1,158 @@
|
||||
import type http from 'http';
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { randomBytes } from 'crypto';
|
||||
import { getCsrfTokenManager } from './csrf-manager.js';
|
||||
|
||||
/** Per-request inputs the CSRF middleware needs. */
export interface CsrfMiddlewareContext {
  /** Request URL pathname used for route matching (checks apply to /api/ paths). */
  pathname: string;
  req: IncomingMessage;
  res: ServerResponse;
}
|
||||
|
||||
function getHeaderValue(header: string | string[] | undefined): string | null {
|
||||
if (!header) return null;
|
||||
if (Array.isArray(header)) return header[0] ?? null;
|
||||
return header;
|
||||
}
|
||||
|
||||
function parseCookieHeader(cookieHeader: string | null | undefined): Record<string, string> {
|
||||
if (!cookieHeader) return {};
|
||||
|
||||
const cookies: Record<string, string> = {};
|
||||
for (const part of cookieHeader.split(';')) {
|
||||
const [rawName, ...rawValueParts] = part.trim().split('=');
|
||||
if (!rawName) continue;
|
||||
const rawValue = rawValueParts.join('=');
|
||||
try {
|
||||
cookies[rawName] = decodeURIComponent(rawValue);
|
||||
} catch {
|
||||
cookies[rawName] = rawValue;
|
||||
}
|
||||
}
|
||||
return cookies;
|
||||
}
|
||||
|
||||
function appendSetCookie(res: ServerResponse, cookie: string): void {
|
||||
const existing = res.getHeader('Set-Cookie');
|
||||
if (!existing) {
|
||||
res.setHeader('Set-Cookie', cookie);
|
||||
return;
|
||||
}
|
||||
|
||||
if (Array.isArray(existing)) {
|
||||
res.setHeader('Set-Cookie', [...existing, cookie]);
|
||||
return;
|
||||
}
|
||||
|
||||
res.setHeader('Set-Cookie', [String(existing), cookie]);
|
||||
}
|
||||
|
||||
function setCsrfCookie(res: ServerResponse, token: string, maxAgeSeconds: number): void {
|
||||
const attributes = [
|
||||
`XSRF-TOKEN=${encodeURIComponent(token)}`,
|
||||
'Path=/',
|
||||
'HttpOnly',
|
||||
'SameSite=Strict',
|
||||
`Max-Age=${maxAgeSeconds}`,
|
||||
];
|
||||
appendSetCookie(res, attributes.join('; '));
|
||||
}
|
||||
|
||||
function envFlagEnabled(name: string): boolean {
|
||||
const value = process.env[name];
|
||||
if (!value) return false;
|
||||
return ['1', 'true', 'yes', 'on'].includes(value.trim().toLowerCase());
|
||||
}
|
||||
|
||||
async function readRawBody(req: IncomingMessage): Promise<string> {
|
||||
const withCache = req as http.IncomingMessage & { __ccwRawBody?: string };
|
||||
if (typeof withCache.__ccwRawBody === 'string') return withCache.__ccwRawBody;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
let body = '';
|
||||
req.on('data', (chunk) => {
|
||||
body += chunk.toString();
|
||||
});
|
||||
req.on('end', () => {
|
||||
withCache.__ccwRawBody = body;
|
||||
resolve(body);
|
||||
});
|
||||
req.on('error', reject);
|
||||
});
|
||||
}
|
||||
|
||||
async function readJsonBody(req: IncomingMessage): Promise<unknown> {
|
||||
const withCache = req as http.IncomingMessage & { body?: unknown };
|
||||
if (withCache.body !== undefined) return withCache.body;
|
||||
|
||||
const raw = await readRawBody(req);
|
||||
if (!raw) return undefined;
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(raw) as unknown;
|
||||
withCache.body = parsed;
|
||||
return parsed;
|
||||
} catch {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
|
||||
function extractCsrfTokenFromBody(body: unknown): string | null {
|
||||
if (!body || typeof body !== 'object') return null;
|
||||
const record = body as Record<string, unknown>;
|
||||
const token = record.csrfToken;
|
||||
return typeof token === 'string' && token ? token : null;
|
||||
}
|
||||
|
||||
function writeJson(res: ServerResponse, status: number, body: Record<string, unknown>): void {
|
||||
res.writeHead(status, { 'Content-Type': 'application/json; charset=utf-8' });
|
||||
res.end(JSON.stringify(body));
|
||||
}
|
||||
|
||||
export async function csrfValidation(ctx: CsrfMiddlewareContext): Promise<boolean> {
|
||||
const { pathname, req, res } = ctx;
|
||||
|
||||
if (!pathname.startsWith('/api/')) return true;
|
||||
if (envFlagEnabled('CCW_DISABLE_CSRF')) return true;
|
||||
|
||||
const method = (req.method || 'GET').toUpperCase();
|
||||
if (!['POST', 'PUT', 'PATCH', 'DELETE'].includes(method)) return true;
|
||||
|
||||
// Always allow token acquisition routes.
|
||||
if (pathname === '/api/auth/token') return true;
|
||||
|
||||
// Requests authenticated via Authorization header do not require CSRF protection.
|
||||
const authorization = getHeaderValue(req.headers.authorization);
|
||||
if (authorization && /^Bearer\s+.+$/i.test(authorization)) return true;
|
||||
|
||||
const headerToken = getHeaderValue(req.headers['x-csrf-token']);
|
||||
const cookies = parseCookieHeader(getHeaderValue(req.headers.cookie));
|
||||
const cookieToken = cookies['XSRF-TOKEN'];
|
||||
|
||||
let bodyToken: string | null = null;
|
||||
if (!headerToken && !cookieToken) {
|
||||
const body = await readJsonBody(req);
|
||||
bodyToken = extractCsrfTokenFromBody(body);
|
||||
}
|
||||
|
||||
const token = headerToken || bodyToken || cookieToken || null;
|
||||
const sessionId = cookies.ccw_session_id;
|
||||
|
||||
if (!token || !sessionId) {
|
||||
writeJson(res, 403, { error: 'CSRF validation failed' });
|
||||
return false;
|
||||
}
|
||||
|
||||
const tokenManager = getCsrfTokenManager();
|
||||
const ok = tokenManager.validateToken(token, sessionId);
|
||||
if (!ok) {
|
||||
writeJson(res, 403, { error: 'CSRF validation failed' });
|
||||
return false;
|
||||
}
|
||||
|
||||
const nextToken = tokenManager.generateToken(sessionId);
|
||||
res.setHeader('X-CSRF-Token', nextToken);
|
||||
setCsrfCookie(res, nextToken, 15 * 60);
|
||||
|
||||
return true;
|
||||
}
|
||||
94
ccw/src/core/auth/middleware.ts
Normal file
94
ccw/src/core/auth/middleware.ts
Normal file
@@ -0,0 +1,94 @@
|
||||
import type http from 'http';
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import type { TokenManager } from './token-manager.js';
|
||||
|
||||
/** Per-request inputs for the API auth middleware. */
export interface AuthMiddlewareContext {
  /** Request URL pathname used for route matching (auth applies to /api/ paths). */
  pathname: string;
  req: IncomingMessage;
  res: ServerResponse;
  /** Validates presented tokens against the signing secret. */
  tokenManager: TokenManager;
  /** Secret the tokens were signed with; passed through to validateToken. */
  secretKey: string;
  /** Exact pathnames exempt from authentication. */
  unauthenticatedPaths?: Set<string>;
}
|
||||
|
||||
function parseCookieHeader(cookieHeader: string | null | undefined): Record<string, string> {
|
||||
if (!cookieHeader) return {};
|
||||
|
||||
const cookies: Record<string, string> = {};
|
||||
for (const part of cookieHeader.split(';')) {
|
||||
const [rawName, ...rawValueParts] = part.trim().split('=');
|
||||
if (!rawName) continue;
|
||||
const rawValue = rawValueParts.join('=');
|
||||
try {
|
||||
cookies[rawName] = decodeURIComponent(rawValue);
|
||||
} catch {
|
||||
cookies[rawName] = rawValue;
|
||||
}
|
||||
}
|
||||
return cookies;
|
||||
}
|
||||
|
||||
function getHeaderValue(header: string | string[] | undefined): string | null {
|
||||
if (!header) return null;
|
||||
if (Array.isArray(header)) return header[0] ?? null;
|
||||
return header;
|
||||
}
|
||||
|
||||
export function extractAuthToken(req: IncomingMessage): string | null {
|
||||
const authorization = getHeaderValue(req.headers.authorization);
|
||||
if (authorization) {
|
||||
const match = authorization.match(/^Bearer\s+(.+)$/i);
|
||||
if (match?.[1]) return match[1].trim();
|
||||
}
|
||||
|
||||
const cookies = parseCookieHeader(getHeaderValue(req.headers.cookie));
|
||||
if (cookies.auth_token) return cookies.auth_token;
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
export function isLocalhostRequest(req: IncomingMessage): boolean {
|
||||
const remote = req.socket?.remoteAddress ?? '';
|
||||
return remote === '127.0.0.1' || remote === '::1' || remote === '::ffff:127.0.0.1';
|
||||
}
|
||||
|
||||
export function setAuthCookie(res: ServerResponse, token: string, expiresAt: Date): void {
|
||||
const maxAgeSeconds = Math.max(0, Math.floor((expiresAt.getTime() - Date.now()) / 1000));
|
||||
|
||||
const attributes = [
|
||||
`auth_token=${encodeURIComponent(token)}`,
|
||||
'Path=/',
|
||||
'HttpOnly',
|
||||
'SameSite=Strict',
|
||||
`Max-Age=${maxAgeSeconds}`,
|
||||
];
|
||||
|
||||
res.setHeader('Set-Cookie', attributes.join('; '));
|
||||
}
|
||||
|
||||
function writeJson(res: ServerResponse, status: number, body: Record<string, unknown>): void {
|
||||
res.writeHead(status, { 'Content-Type': 'application/json; charset=utf-8' });
|
||||
res.end(JSON.stringify(body));
|
||||
}
|
||||
|
||||
export function authMiddleware(ctx: AuthMiddlewareContext): boolean {
|
||||
const { pathname, req, res, tokenManager, secretKey, unauthenticatedPaths } = ctx;
|
||||
|
||||
if (!pathname.startsWith('/api/')) return true;
|
||||
if (unauthenticatedPaths?.has(pathname)) return true;
|
||||
|
||||
const token = extractAuthToken(req);
|
||||
if (!token) {
|
||||
writeJson(res, 401, { error: 'Unauthorized' });
|
||||
return false;
|
||||
}
|
||||
|
||||
const ok = tokenManager.validateToken(token, secretKey);
|
||||
if (!ok) {
|
||||
writeJson(res, 401, { error: 'Unauthorized' });
|
||||
return false;
|
||||
}
|
||||
|
||||
(req as http.IncomingMessage & { authenticated?: boolean }).authenticated = true;
|
||||
return true;
|
||||
}
|
||||
219
ccw/src/core/auth/token-manager.ts
Normal file
219
ccw/src/core/auth/token-manager.ts
Normal file
@@ -0,0 +1,219 @@
|
||||
import { randomBytes } from 'crypto';
|
||||
import { existsSync, mkdirSync, readFileSync, writeFileSync, chmodSync } from 'fs';
|
||||
import { dirname, join } from 'path';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import type { Algorithm } from 'jsonwebtoken';
|
||||
import { getCCWHome } from '../../config/storage-paths.js';
|
||||
|
||||
/** A signed token plus its absolute expiry time. */
export interface TokenResult {
  token: string;
  expiresAt: Date;
}

/** In-memory lifecycle record for an issued token. */
export interface TokenInfo extends TokenResult {
  issuedAt: Date;
  /** Set when the token was explicitly revoked (tracked in memory only). */
  revokedAt?: Date;
  /** Set when the token was replaced during rotation. */
  rotatedAt?: Date;
  /** The token that superseded this one, if rotated. */
  replacedBy?: string;
}

/** Construction-time overrides; defaults cover the standard auth directory layout. */
export interface TokenManagerOptions {
  authDir?: string;
  secretKeyPath?: string;
  tokenPath?: string;
  /** Token lifetime in milliseconds (default: 24 hours). */
  tokenTtlMs?: number;
  /** Rotate a persisted token when less than this long remains before expiry (default: 1 hour). */
  rotateBeforeExpiryMs?: number;
}

// 24-hour token lifetime.
const DEFAULT_TOKEN_TTL_MS = 24 * 60 * 60 * 1000;
// Rotate persisted tokens within an hour of expiry.
const DEFAULT_ROTATE_BEFORE_EXPIRY_MS = 60 * 60 * 1000;
// Symmetric HMAC signing; the same secret signs and verifies.
const JWT_ALGORITHM: Algorithm = 'HS256';
|
||||
|
||||
function ensureDirectory(dirPath: string): void {
|
||||
if (!existsSync(dirPath)) {
|
||||
mkdirSync(dirPath, { recursive: true });
|
||||
}
|
||||
}
|
||||
|
||||
function bestEffortRestrictPermissions(filePath: string, mode: number): void {
|
||||
try {
|
||||
chmodSync(filePath, mode);
|
||||
} catch {
|
||||
// Ignore permission errors (e.g., Windows or restrictive environments)
|
||||
}
|
||||
}
|
||||
|
||||
function writeSecretFile(filePath: string, content: string): void {
|
||||
ensureDirectory(dirname(filePath));
|
||||
writeFileSync(filePath, content, { encoding: 'utf8', mode: 0o600 });
|
||||
bestEffortRestrictPermissions(filePath, 0o600);
|
||||
}
|
||||
|
||||
function writeTokenFile(filePath: string, content: string): void {
|
||||
ensureDirectory(dirname(filePath));
|
||||
writeFileSync(filePath, content, { encoding: 'utf8', mode: 0o600 });
|
||||
bestEffortRestrictPermissions(filePath, 0o600);
|
||||
}
|
||||
|
||||
function parseJwtExpiry(token: string): Date | null {
|
||||
const decoded = jwt.decode(token);
|
||||
if (!decoded || typeof decoded !== 'object') return null;
|
||||
if (typeof decoded.exp !== 'number') return null;
|
||||
return new Date(decoded.exp * 1000);
|
||||
}
|
||||
|
||||
/**
 * Issues, validates, rotates, and persists HS256-signed API tokens.
 *
 * State lives in two places:
 *  - on disk: the signing secret (secret.key) and the current token
 *    (token.jwt), both written with restrictive permissions;
 *  - in memory: an activeTokens map used for revocation/rotation tracking.
 *
 * NOTE(review): revocation is recorded only in the in-memory map, so a
 * revoked token will verify again after a process restart as long as its
 * signature and expiry are valid — confirm this is acceptable.
 */
export class TokenManager {
  private readonly authDir: string;
  private readonly secretKeyPath: string;
  private readonly tokenPath: string;
  // Token lifetime in milliseconds.
  private readonly tokenTtlMs: number;
  // Rotate persisted tokens when fewer than this many ms remain.
  private readonly rotateBeforeExpiryMs: number;

  // Lazily loaded signing secret (see getSecretKey).
  private secretKey: string | null = null;
  // Lifecycle records for tokens issued or observed by this process.
  private readonly activeTokens = new Map<string, TokenInfo>();

  constructor(options: TokenManagerOptions = {}) {
    this.authDir = options.authDir ?? join(getCCWHome(), 'auth');
    this.secretKeyPath = options.secretKeyPath ?? join(this.authDir, 'secret.key');
    this.tokenPath = options.tokenPath ?? join(this.authDir, 'token.jwt');
    this.tokenTtlMs = options.tokenTtlMs ?? DEFAULT_TOKEN_TTL_MS;
    this.rotateBeforeExpiryMs = options.rotateBeforeExpiryMs ?? DEFAULT_ROTATE_BEFORE_EXPIRY_MS;
  }

  /**
   * Return the signing secret, loading it from disk or generating a fresh
   * 32-byte secret on first use. Throws if the on-disk file exists but is empty.
   */
  getSecretKey(): string {
    if (this.secretKey) return this.secretKey;

    ensureDirectory(this.authDir);
    if (existsSync(this.secretKeyPath)) {
      const loaded = readFileSync(this.secretKeyPath, 'utf8').trim();
      if (!loaded) {
        throw new Error('Auth secret key file is empty');
      }
      this.secretKey = loaded;
      return loaded;
    }

    const generated = randomBytes(32).toString('hex');
    writeSecretFile(this.secretKeyPath, generated);
    this.secretKey = generated;
    return generated;
  }

  /** Sign a fresh token (random jti claim) and record it as active. */
  generateToken(secretKey: string): TokenResult {
    const token = jwt.sign(
      {
        typ: 'ccw-api',
        // Random token id so two tokens minted in the same second still differ.
        jti: randomBytes(16).toString('hex'),
      },
      secretKey,
      {
        algorithm: JWT_ALGORITHM,
        expiresIn: Math.floor(this.tokenTtlMs / 1000),
      }
    );

    // Fall back to TTL arithmetic if the exp claim cannot be decoded.
    const expiresAt = parseJwtExpiry(token) ?? new Date(Date.now() + this.tokenTtlMs);
    this.activeTokens.set(token, { token, expiresAt, issuedAt: new Date() });
    return { token, expiresAt };
  }

  /**
   * True when the token verifies against `secretKey` (signature and expiry)
   * and has not been revoked in this process.
   */
  validateToken(token: string, secretKey: string): boolean {
    const info = this.activeTokens.get(token);
    if (info?.revokedAt) return false;

    try {
      // Algorithm list is pinned so a token cannot pick its own algorithm.
      jwt.verify(token, secretKey, { algorithms: [JWT_ALGORITHM] });
      return true;
    } catch {
      return false;
    }
  }

  /** Revoke `token` (in-memory) and mint its replacement. */
  refreshToken(token: string, secretKey: string): TokenResult {
    const existing = this.activeTokens.get(token);
    if (existing) {
      existing.revokedAt = new Date();
    }

    const next = this.generateToken(secretKey);
    if (existing) {
      existing.rotatedAt = new Date();
      existing.replacedBy = next.token;
    }
    return next;
  }

  /**
   * Read an existing persisted token or create a new one.
   * If the existing token is nearing expiry, rotate it.
   */
  getOrCreateAuthToken(): TokenResult {
    const secretKey = this.getSecretKey();

    if (existsSync(this.tokenPath)) {
      const persisted = readFileSync(this.tokenPath, 'utf8').trim();
      if (persisted && this.validateToken(persisted, secretKey)) {
        const expiresAt = parseJwtExpiry(persisted);
        if (expiresAt) {
          // Ensure persisted token is tracked for revocation support
          if (!this.activeTokens.has(persisted)) {
            this.activeTokens.set(persisted, { token: persisted, expiresAt, issuedAt: new Date() });
          }

          const msUntilExpiry = expiresAt.getTime() - Date.now();
          if (msUntilExpiry > this.rotateBeforeExpiryMs) {
            return { token: persisted, expiresAt };
          }
        }

        // Token exists but is expiring soon (or expiry missing) → rotate
        const rotated = this.generateToken(secretKey);
        writeTokenFile(this.tokenPath, rotated.token);

        const existing = this.activeTokens.get(persisted);
        if (existing) {
          existing.rotatedAt = new Date();
          existing.replacedBy = rotated.token;
        }

        return rotated;
      }
    }

    const created = this.generateToken(secretKey);
    writeTokenFile(this.tokenPath, created.token);
    return created;
  }

  /** Mark `token` revoked; unknown tokens get a synthetic, already-expired record. */
  revokeToken(token: string): void {
    const info = this.activeTokens.get(token);
    if (info) {
      info.revokedAt = new Date();
    } else {
      this.activeTokens.set(token, {
        token,
        issuedAt: new Date(),
        expiresAt: new Date(0),
        revokedAt: new Date(),
      });
    }
  }
}
|
||||
|
||||
let tokenManagerInstance: TokenManager | null = null;
|
||||
|
||||
export function getTokenManager(options?: TokenManagerOptions): TokenManager {
|
||||
if (!tokenManagerInstance) {
|
||||
tokenManagerInstance = new TokenManager(options);
|
||||
}
|
||||
return tokenManagerInstance;
|
||||
}
|
||||
|
||||
export function resetTokenManager(): void {
|
||||
tokenManagerInstance = null;
|
||||
}
|
||||
|
||||
export function getOrCreateAuthToken(): TokenResult {
|
||||
return getTokenManager().getOrCreateAuthToken();
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { existsSync, mkdirSync, readFileSync, writeFileSync, statSync, unlinkSync, readdirSync } from 'fs';
|
||||
import { join, dirname } from 'path';
|
||||
import { StoragePaths, ensureStorageDir } from '../config/storage-paths.js';
|
||||
import { readFile, readdir, stat, unlink, writeFile, mkdir } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { StoragePaths } from '../config/storage-paths.js';
|
||||
|
||||
interface CacheEntry<T> {
|
||||
data: T;
|
||||
@@ -42,13 +42,17 @@ export class CacheManager<T> {
|
||||
* @param watchPaths - Array of file/directory paths to check for modifications
|
||||
* @returns Cached data or null if invalid/expired
|
||||
*/
|
||||
get(watchPaths: string[] = []): T | null {
|
||||
if (!existsSync(this.cacheFile)) {
|
||||
async get(watchPaths: string[] = []): Promise<T | null> {
|
||||
let content: string;
|
||||
try {
|
||||
content = await readFile(this.cacheFile, 'utf8');
|
||||
} catch (err: any) {
|
||||
if (err?.code === 'ENOENT') return null;
|
||||
console.warn(`Cache read error for ${this.cacheFile}:`, err?.message || String(err));
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const content = readFileSync(this.cacheFile, 'utf8');
|
||||
const entry: CacheEntry<T> = JSON.parse(content, (key, value) => {
|
||||
// Revive Map objects from JSON
|
||||
if (key === 'fileHashes' && value && typeof value === 'object') {
|
||||
@@ -67,16 +71,16 @@ export class CacheManager<T> {
|
||||
|
||||
// Check if any watched files have changed
|
||||
if (watchPaths.length > 0) {
|
||||
const currentHashes = this.computeFileHashes(watchPaths);
|
||||
const currentHashes = await this.computeFileHashes(watchPaths);
|
||||
if (!this.hashesMatch(entry.fileHashes, currentHashes)) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
return entry.data;
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
// If cache file is corrupted or unreadable, treat as invalid
|
||||
console.warn(`Cache read error for ${this.cacheFile}:`, (err as Error).message);
|
||||
console.warn(`Cache parse error for ${this.cacheFile}:`, err?.message || String(err));
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -86,17 +90,15 @@ export class CacheManager<T> {
|
||||
* @param data - Data to cache
|
||||
* @param watchPaths - Array of file/directory paths to track
|
||||
*/
|
||||
set(data: T, watchPaths: string[] = []): void {
|
||||
async set(data: T, watchPaths: string[] = []): Promise<void> {
|
||||
try {
|
||||
// Ensure cache directory exists
|
||||
if (!existsSync(this.cacheDir)) {
|
||||
mkdirSync(this.cacheDir, { recursive: true });
|
||||
}
|
||||
await mkdir(this.cacheDir, { recursive: true });
|
||||
|
||||
const entry: CacheEntry<T> = {
|
||||
data,
|
||||
timestamp: Date.now(),
|
||||
fileHashes: this.computeFileHashes(watchPaths),
|
||||
fileHashes: await this.computeFileHashes(watchPaths),
|
||||
ttl: this.ttl
|
||||
};
|
||||
|
||||
@@ -106,7 +108,7 @@ export class CacheManager<T> {
|
||||
fileHashes: Object.fromEntries(entry.fileHashes)
|
||||
};
|
||||
|
||||
writeFileSync(this.cacheFile, JSON.stringify(serializable, null, 2), 'utf8');
|
||||
await writeFile(this.cacheFile, JSON.stringify(serializable, null, 2), 'utf8');
|
||||
} catch (err) {
|
||||
console.warn(`Cache write error for ${this.cacheFile}:`, (err as Error).message);
|
||||
}
|
||||
@@ -115,12 +117,11 @@ export class CacheManager<T> {
|
||||
/**
|
||||
* Invalidate (delete) the cache
|
||||
*/
|
||||
invalidate(): void {
|
||||
async invalidate(): Promise<void> {
|
||||
try {
|
||||
if (existsSync(this.cacheFile)) {
|
||||
unlinkSync(this.cacheFile);
|
||||
}
|
||||
await unlink(this.cacheFile);
|
||||
} catch (err) {
|
||||
if ((err as any)?.code === 'ENOENT') return;
|
||||
console.warn(`Cache invalidation error for ${this.cacheFile}:`, (err as Error).message);
|
||||
}
|
||||
}
|
||||
@@ -130,8 +131,8 @@ export class CacheManager<T> {
|
||||
* @param watchPaths - Array of file/directory paths to check
|
||||
* @returns True if cache exists and is valid
|
||||
*/
|
||||
isValid(watchPaths: string[] = []): boolean {
|
||||
return this.get(watchPaths) !== null;
|
||||
async isValid(watchPaths: string[] = []): Promise<boolean> {
|
||||
return (await this.get(watchPaths)) !== null;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -139,32 +140,29 @@ export class CacheManager<T> {
|
||||
* @param watchPaths - Array of file/directory paths
|
||||
* @returns Map of path to mtime
|
||||
*/
|
||||
private computeFileHashes(watchPaths: string[]): Map<string, number> {
|
||||
private async computeFileHashes(watchPaths: string[]): Promise<Map<string, number>> {
|
||||
const hashes = new Map<string, number>();
|
||||
|
||||
for (const path of watchPaths) {
|
||||
await Promise.all(watchPaths.map(async (watchPath) => {
|
||||
try {
|
||||
if (!existsSync(path)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const stats = statSync(path);
|
||||
const stats = await stat(watchPath);
|
||||
|
||||
if (stats.isDirectory()) {
|
||||
// For directories, use directory mtime (detects file additions/deletions)
|
||||
hashes.set(path, stats.mtimeMs);
|
||||
hashes.set(watchPath, stats.mtimeMs);
|
||||
|
||||
// Also recursively scan for workflow session files
|
||||
this.scanDirectory(path, hashes);
|
||||
await this.scanDirectory(watchPath, hashes);
|
||||
} else {
|
||||
// For files, use file mtime
|
||||
hashes.set(path, stats.mtimeMs);
|
||||
hashes.set(watchPath, stats.mtimeMs);
|
||||
}
|
||||
} catch (err) {
|
||||
} catch (err: any) {
|
||||
if (err?.code === 'ENOENT') return;
|
||||
// Skip paths that can't be accessed
|
||||
console.warn(`Cannot access path ${path}:`, (err as Error).message);
|
||||
console.warn(`Cannot access path ${watchPath}:`, err?.message || String(err));
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
return hashes;
|
||||
}
|
||||
@@ -175,26 +173,34 @@ export class CacheManager<T> {
|
||||
* @param hashes - Map to store file hashes
|
||||
* @param depth - Current recursion depth (max 3)
|
||||
*/
|
||||
private scanDirectory(dirPath: string, hashes: Map<string, number>, depth: number = 0): void {
|
||||
private async scanDirectory(dirPath: string, hashes: Map<string, number>, depth: number = 0): Promise<void> {
|
||||
if (depth > 3) return; // Limit recursion depth
|
||||
|
||||
try {
|
||||
const entries = readdirSync(dirPath, { withFileTypes: true });
|
||||
const entries = await readdir(dirPath, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
await Promise.all(entries.map(async (entry) => {
|
||||
const fullPath = join(dirPath, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
// Track important directories
|
||||
if (entry.name === '.task' || entry.name === '.review' || entry.name === '.summaries') {
|
||||
const stats = statSync(fullPath);
|
||||
hashes.set(fullPath, stats.mtimeMs);
|
||||
this.scanDirectory(fullPath, hashes, depth + 1);
|
||||
try {
|
||||
const stats = await stat(fullPath);
|
||||
hashes.set(fullPath, stats.mtimeMs);
|
||||
await this.scanDirectory(fullPath, hashes, depth + 1);
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
} else if (entry.name.startsWith('WFS-')) {
|
||||
// Scan WFS session directories
|
||||
const stats = statSync(fullPath);
|
||||
hashes.set(fullPath, stats.mtimeMs);
|
||||
this.scanDirectory(fullPath, hashes, depth + 1);
|
||||
try {
|
||||
const stats = await stat(fullPath);
|
||||
hashes.set(fullPath, stats.mtimeMs);
|
||||
await this.scanDirectory(fullPath, hashes, depth + 1);
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
} else if (entry.isFile()) {
|
||||
// Track important files
|
||||
@@ -204,11 +210,15 @@ export class CacheManager<T> {
|
||||
entry.name === 'TODO_LIST.md' ||
|
||||
entry.name === 'workflow-session.json'
|
||||
) {
|
||||
const stats = statSync(fullPath);
|
||||
hashes.set(fullPath, stats.mtimeMs);
|
||||
try {
|
||||
const stats = await stat(fullPath);
|
||||
hashes.set(fullPath, stats.mtimeMs);
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}));
|
||||
} catch (err) {
|
||||
// Skip directories that can't be read
|
||||
console.warn(`Cannot scan directory ${dirPath}:`, (err as Error).message);
|
||||
@@ -245,21 +255,24 @@ export class CacheManager<T> {
|
||||
* Get cache statistics
|
||||
* @returns Cache info object
|
||||
*/
|
||||
getStats(): { exists: boolean; age?: number; fileCount?: number; size?: number } {
|
||||
if (!existsSync(this.cacheFile)) {
|
||||
async getStats(): Promise<{ exists: boolean; age?: number; fileCount?: number; size?: number }> {
|
||||
let fileStats;
|
||||
try {
|
||||
fileStats = await stat(this.cacheFile);
|
||||
} catch (err: any) {
|
||||
if (err?.code === 'ENOENT') return { exists: false };
|
||||
return { exists: false };
|
||||
}
|
||||
|
||||
try {
|
||||
const stats = statSync(this.cacheFile);
|
||||
const content = readFileSync(this.cacheFile, 'utf8');
|
||||
const content = await readFile(this.cacheFile, 'utf8');
|
||||
const entry = JSON.parse(content);
|
||||
|
||||
return {
|
||||
exists: true,
|
||||
age: Date.now() - entry.timestamp,
|
||||
fileCount: Object.keys(entry.fileHashes || {}).length,
|
||||
size: stats.size
|
||||
size: fileStats.size
|
||||
};
|
||||
} catch {
|
||||
return { exists: false };
|
||||
@@ -287,6 +300,5 @@ export function createDashboardCache(workflowDir: string, ttl?: number): CacheMa
|
||||
// Use centralized storage path
|
||||
const projectPath = extractProjectPath(workflowDir);
|
||||
const cacheDir = StoragePaths.project(projectPath).cache;
|
||||
ensureStorageDir(cacheDir);
|
||||
return new CacheManager('dashboard-data', { cacheDir, ttl });
|
||||
}
|
||||
|
||||
@@ -7,6 +7,15 @@ import { execSync } from 'child_process';
|
||||
import { existsSync, statSync, readdirSync } from 'fs';
|
||||
import { dirname, extname, relative, join } from 'path';
|
||||
import { getCoreMemoryStore, ClaudeUpdateRecord } from './core-memory-store.js';
|
||||
import { EXEC_TIMEOUTS } from '../utils/exec-constants.js';
|
||||
|
||||
function isExecTimeoutError(error: unknown): boolean {
|
||||
const err = error as { code?: unknown; errno?: unknown; message?: unknown } | null;
|
||||
const code = err?.code ?? err?.errno;
|
||||
if (code === 'ETIMEDOUT') return true;
|
||||
const message = typeof err?.message === 'string' ? err.message : '';
|
||||
return message.includes('ETIMEDOUT');
|
||||
}
|
||||
|
||||
// Source file extensions to track (from detect-changed-modules.ts)
|
||||
const SOURCE_EXTENSIONS = [
|
||||
@@ -53,9 +62,12 @@ export interface FreshnessResponse {
|
||||
*/
|
||||
function isGitRepo(basePath: string): boolean {
|
||||
try {
|
||||
execSync('git rev-parse --git-dir', { cwd: basePath, stdio: 'pipe' });
|
||||
execSync('git rev-parse --git-dir', { cwd: basePath, stdio: 'pipe', timeout: EXEC_TIMEOUTS.GIT_QUICK });
|
||||
return true;
|
||||
} catch (e) {
|
||||
} catch (e: unknown) {
|
||||
if (isExecTimeoutError(e)) {
|
||||
console.warn(`[Claude Freshness] git rev-parse timed out after ${EXEC_TIMEOUTS.GIT_QUICK}ms`);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -68,10 +80,14 @@ export function getCurrentGitCommit(basePath: string): string | null {
|
||||
const output = execSync('git rev-parse HEAD', {
|
||||
cwd: basePath,
|
||||
encoding: 'utf8',
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
timeout: EXEC_TIMEOUTS.GIT_QUICK,
|
||||
}).trim();
|
||||
return output || null;
|
||||
} catch (e) {
|
||||
} catch (e: unknown) {
|
||||
if (isExecTimeoutError(e)) {
|
||||
console.warn(`[Claude Freshness] git rev-parse HEAD timed out after ${EXEC_TIMEOUTS.GIT_QUICK}ms`);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -91,7 +107,8 @@ function getChangedFilesSince(basePath: string, modulePath: string, sinceDate: s
|
||||
{
|
||||
cwd: basePath,
|
||||
encoding: 'utf8',
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
timeout: EXEC_TIMEOUTS.GIT_LOG,
|
||||
}
|
||||
).trim();
|
||||
|
||||
@@ -103,7 +120,10 @@ function getChangedFilesSince(basePath: string, modulePath: string, sinceDate: s
|
||||
const ext = extname(f).toLowerCase();
|
||||
return SOURCE_EXTENSIONS.includes(ext);
|
||||
});
|
||||
} catch (e) {
|
||||
} catch (e: unknown) {
|
||||
if (isExecTimeoutError(e)) {
|
||||
console.warn(`[Claude Freshness] git log timed out after ${EXEC_TIMEOUTS.GIT_LOG}ms, falling back to mtime scan`);
|
||||
}
|
||||
// Fallback to mtime-based detection
|
||||
return findFilesModifiedSince(modulePath, sinceDate);
|
||||
}
|
||||
|
||||
10
ccw/src/core/cors.ts
Normal file
10
ccw/src/core/cors.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
export function validateCorsOrigin(origin: string, port: number): boolean {
|
||||
return origin === `http://localhost:${port}` || origin === `http://127.0.0.1:${port}`;
|
||||
}
|
||||
|
||||
export function getCorsOrigin(origin: string | undefined, port: number): string {
|
||||
const fallback = `http://localhost:${port}`;
|
||||
if (!origin) return fallback;
|
||||
return validateCorsOrigin(origin, port) ? origin : fallback;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
// @ts-nocheck
|
||||
// Add after line 13 (after REVIEW_TEMPLATE constant)
|
||||
|
||||
// Modular dashboard JS files (in dependency order)
|
||||
|
||||
@@ -1,8 +1,59 @@
|
||||
// @ts-nocheck
|
||||
import { readFileSync, existsSync } from 'fs';
|
||||
import { join, dirname } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
interface ReviewDimensionInfo {
|
||||
count: number;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
interface ReviewData {
|
||||
totalFindings: number;
|
||||
severityDistribution: Record<string, number>;
|
||||
dimensionSummary: Record<string, ReviewDimensionInfo>;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
interface SessionTaskData {
|
||||
status?: string;
|
||||
title?: string;
|
||||
task_id?: string;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
interface SessionData {
|
||||
session_id?: string;
|
||||
project?: string;
|
||||
created_at?: string;
|
||||
tasks: SessionTaskData[];
|
||||
taskCount: number;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
interface DashboardStatistics {
|
||||
totalSessions: number;
|
||||
activeSessions: number;
|
||||
totalTasks: number;
|
||||
completedTasks: number;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
interface DashboardData {
|
||||
generatedAt?: string;
|
||||
activeSessions: SessionData[];
|
||||
archivedSessions: SessionData[];
|
||||
statistics: DashboardStatistics;
|
||||
reviewData?: ReviewData;
|
||||
liteTasks?: {
|
||||
litePlan?: unknown[];
|
||||
liteFix?: unknown[];
|
||||
[key: string]: unknown;
|
||||
};
|
||||
projectPath?: string;
|
||||
recentPaths?: string[];
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = dirname(__filename);
|
||||
|
||||
@@ -111,18 +162,19 @@ const MODULE_FILES = [
|
||||
* @returns {Promise<string>} - Generated HTML
|
||||
*/
|
||||
export async function generateDashboard(data: unknown): Promise<string> {
|
||||
const dashboardData = (data ?? {}) as DashboardData;
|
||||
// Use new unified template (with sidebar layout)
|
||||
if (existsSync(UNIFIED_TEMPLATE)) {
|
||||
return generateFromUnifiedTemplate(data);
|
||||
return generateFromUnifiedTemplate(dashboardData);
|
||||
}
|
||||
|
||||
// Fallback to legacy workflow template
|
||||
if (existsSync(WORKFLOW_TEMPLATE)) {
|
||||
return generateFromBundledTemplate(data, WORKFLOW_TEMPLATE);
|
||||
return generateFromBundledTemplate(dashboardData, WORKFLOW_TEMPLATE);
|
||||
}
|
||||
|
||||
// Fallback to inline dashboard if templates missing
|
||||
return generateInlineDashboard(data);
|
||||
return generateInlineDashboard(dashboardData);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -130,7 +182,7 @@ export async function generateDashboard(data: unknown): Promise<string> {
|
||||
* @param {Object} data - Dashboard data
|
||||
* @returns {string} - Generated HTML
|
||||
*/
|
||||
function generateFromUnifiedTemplate(data: unknown): string {
|
||||
function generateFromUnifiedTemplate(data: DashboardData): string {
|
||||
let html = readFileSync(UNIFIED_TEMPLATE, 'utf8');
|
||||
|
||||
// Read and concatenate modular CSS files in load order
|
||||
@@ -198,7 +250,7 @@ function generateFromUnifiedTemplate(data: unknown): string {
|
||||
* @param {string} templatePath - Path to workflow-dashboard.html
|
||||
* @returns {string} - Generated HTML
|
||||
*/
|
||||
function generateFromBundledTemplate(data: unknown, templatePath: string): string {
|
||||
function generateFromBundledTemplate(data: DashboardData, templatePath: string): string {
|
||||
let html = readFileSync(templatePath, 'utf8');
|
||||
|
||||
// Prepare workflow data for injection
|
||||
@@ -224,7 +276,7 @@ function generateFromBundledTemplate(data: unknown, templatePath: string): strin
|
||||
* @param {Object} reviewData - Review data to display
|
||||
* @returns {string} - Modified HTML with review tab
|
||||
*/
|
||||
function injectReviewTab(html, reviewData) {
|
||||
function injectReviewTab(html: string, reviewData: ReviewData): string {
|
||||
// Add review tab button in header controls
|
||||
const tabButtonHtml = `
|
||||
<button class="btn" data-tab="reviews" id="reviewTabBtn">Reviews (${reviewData.totalFindings})</button>
|
||||
@@ -266,10 +318,10 @@ function injectReviewTab(html, reviewData) {
|
||||
* @param {Object} reviewData - Review data
|
||||
* @returns {string} - HTML for review section
|
||||
*/
|
||||
function generateReviewSection(reviewData) {
|
||||
function generateReviewSection(reviewData: ReviewData): string {
|
||||
const severityBars = Object.entries(reviewData.severityDistribution)
|
||||
.map(([severity, count]) => {
|
||||
const colors = {
|
||||
const colors: Record<string, string> = {
|
||||
critical: '#c53030',
|
||||
high: '#f56565',
|
||||
medium: '#ed8936',
|
||||
@@ -404,7 +456,7 @@ function generateReviewSection(reviewData) {
|
||||
* @param {Object} reviewData - Review data
|
||||
* @returns {string} - JavaScript code
|
||||
*/
|
||||
function generateReviewScript(reviewData) {
|
||||
function generateReviewScript(reviewData: ReviewData): string {
|
||||
return `
|
||||
// Review tab functionality
|
||||
const reviewTabBtn = document.getElementById('reviewTabBtn');
|
||||
@@ -444,7 +496,7 @@ function generateReviewScript(reviewData) {
|
||||
* @param {Object} data - Dashboard data
|
||||
* @returns {string}
|
||||
*/
|
||||
function generateInlineDashboard(data: unknown): string {
|
||||
function generateInlineDashboard(data: DashboardData): string {
|
||||
const stats = data.statistics;
|
||||
const hasReviews = data.reviewData && data.reviewData.totalFindings > 0;
|
||||
|
||||
@@ -623,7 +675,7 @@ function generateInlineDashboard(data: unknown): string {
|
||||
</div>
|
||||
</div>
|
||||
|
||||
${hasReviews ? renderReviewTab(data.reviewData) : ''}
|
||||
${hasReviews ? renderReviewTab(data.reviewData as ReviewData) : ''}
|
||||
</div>
|
||||
|
||||
<button class="theme-toggle" onclick="toggleTheme()">🌙</button>
|
||||
@@ -666,7 +718,7 @@ function generateInlineDashboard(data: unknown): string {
|
||||
* @param {boolean} isActive - Whether session is active
|
||||
* @returns {string} - HTML string
|
||||
*/
|
||||
function renderSessionCard(session, isActive) {
|
||||
function renderSessionCard(session: SessionData, isActive: boolean): string {
|
||||
const completedTasks = isActive
|
||||
? session.tasks.filter(t => t.status === 'completed').length
|
||||
: session.taskCount;
|
||||
@@ -704,7 +756,7 @@ function renderSessionCard(session, isActive) {
|
||||
* @param {Object} reviewData - Review data
|
||||
* @returns {string} - HTML string
|
||||
*/
|
||||
function renderReviewTab(reviewData) {
|
||||
function renderReviewTab(reviewData: ReviewData): string {
|
||||
const { severityDistribution, dimensionSummary } = reviewData;
|
||||
|
||||
return `
|
||||
@@ -741,4 +793,4 @@ function renderReviewTab(reviewData) {
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -197,7 +197,7 @@ export async function aggregateData(sessions: ScanSessionsResult, workflowDir: s
|
||||
];
|
||||
|
||||
// Check cache first
|
||||
const cachedData = cache.get(watchPaths);
|
||||
const cachedData = await cache.get(watchPaths);
|
||||
if (cachedData !== null) {
|
||||
console.log('Using cached dashboard data');
|
||||
return cachedData;
|
||||
@@ -269,7 +269,7 @@ export async function aggregateData(sessions: ScanSessionsResult, workflowDir: s
|
||||
}
|
||||
|
||||
// Store in cache before returning
|
||||
cache.set(data, watchPaths);
|
||||
await cache.set(data, watchPaths);
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { existsSync, readdirSync, readFileSync, statSync } from 'fs';
|
||||
import { readFile, readdir, stat } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
|
||||
interface TaskMeta {
|
||||
@@ -85,10 +85,12 @@ export async function scanLiteTasks(workflowDir: string): Promise<LiteTasks> {
|
||||
const litePlanDir = join(workflowDir, '.lite-plan');
|
||||
const liteFixDir = join(workflowDir, '.lite-fix');
|
||||
|
||||
return {
|
||||
litePlan: scanLiteDir(litePlanDir, 'lite-plan'),
|
||||
liteFix: scanLiteDir(liteFixDir, 'lite-fix')
|
||||
};
|
||||
const [litePlan, liteFix] = await Promise.all([
|
||||
scanLiteDir(litePlanDir, 'lite-plan'),
|
||||
scanLiteDir(liteFixDir, 'lite-fix'),
|
||||
]);
|
||||
|
||||
return { litePlan, liteFix };
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -97,39 +99,45 @@ export async function scanLiteTasks(workflowDir: string): Promise<LiteTasks> {
|
||||
* @param type - Task type ('lite-plan' or 'lite-fix')
|
||||
* @returns Array of lite task sessions
|
||||
*/
|
||||
function scanLiteDir(dir: string, type: string): LiteSession[] {
|
||||
if (!existsSync(dir)) return [];
|
||||
|
||||
async function scanLiteDir(dir: string, type: string): Promise<LiteSession[]> {
|
||||
try {
|
||||
const sessions = readdirSync(dir, { withFileTypes: true })
|
||||
.filter(d => d.isDirectory())
|
||||
.map(d => {
|
||||
const sessionPath = join(dir, d.name);
|
||||
const session: LiteSession = {
|
||||
id: d.name,
|
||||
type,
|
||||
path: sessionPath,
|
||||
createdAt: getCreatedTime(sessionPath),
|
||||
plan: loadPlanJson(sessionPath),
|
||||
tasks: loadTaskJsons(sessionPath),
|
||||
progress: { total: 0, completed: 0, percentage: 0 }
|
||||
};
|
||||
const entries = await readdir(dir, { withFileTypes: true });
|
||||
|
||||
// For lite-fix sessions, also load diagnoses separately
|
||||
if (type === 'lite-fix') {
|
||||
session.diagnoses = loadDiagnoses(sessionPath);
|
||||
}
|
||||
const sessions = (await Promise.all(
|
||||
entries
|
||||
.filter((entry) => entry.isDirectory())
|
||||
.map(async (entry) => {
|
||||
const sessionPath = join(dir, entry.name);
|
||||
|
||||
// Calculate progress
|
||||
session.progress = calculateProgress(session.tasks);
|
||||
const [createdAt, plan, tasks, diagnoses] = await Promise.all([
|
||||
getCreatedTime(sessionPath),
|
||||
loadPlanJson(sessionPath),
|
||||
loadTaskJsons(sessionPath),
|
||||
type === 'lite-fix' ? loadDiagnoses(sessionPath) : Promise.resolve(undefined),
|
||||
]);
|
||||
|
||||
return session;
|
||||
})
|
||||
const session: LiteSession = {
|
||||
id: entry.name,
|
||||
type,
|
||||
path: sessionPath,
|
||||
createdAt,
|
||||
plan,
|
||||
tasks,
|
||||
diagnoses,
|
||||
progress: { total: 0, completed: 0, percentage: 0 },
|
||||
};
|
||||
|
||||
session.progress = calculateProgress(session.tasks);
|
||||
return session;
|
||||
}),
|
||||
))
|
||||
.filter((session): session is LiteSession => session !== null)
|
||||
.sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
|
||||
|
||||
return sessions;
|
||||
} catch (err) {
|
||||
console.error(`Error scanning ${dir}:`, (err as Error).message);
|
||||
} catch (err: any) {
|
||||
if (err?.code === 'ENOENT') return [];
|
||||
console.error(`Error scanning ${dir}:`, err?.message || String(err));
|
||||
return [];
|
||||
}
|
||||
}
|
||||
@@ -139,32 +147,26 @@ function scanLiteDir(dir: string, type: string): LiteSession[] {
|
||||
* @param sessionPath - Session directory path
|
||||
* @returns Plan data or null
|
||||
*/
|
||||
function loadPlanJson(sessionPath: string): unknown | null {
|
||||
async function loadPlanJson(sessionPath: string): Promise<unknown | null> {
|
||||
// Try fix-plan.json first (for lite-fix), then plan.json (for lite-plan)
|
||||
const fixPlanPath = join(sessionPath, 'fix-plan.json');
|
||||
const planPath = join(sessionPath, 'plan.json');
|
||||
|
||||
// Try fix-plan.json first
|
||||
if (existsSync(fixPlanPath)) {
|
||||
try {
|
||||
const content = readFileSync(fixPlanPath, 'utf8');
|
||||
return JSON.parse(content);
|
||||
} catch {
|
||||
// Continue to try plan.json
|
||||
}
|
||||
try {
|
||||
const content = await readFile(fixPlanPath, 'utf8');
|
||||
return JSON.parse(content);
|
||||
} catch {
|
||||
// Continue to try plan.json
|
||||
}
|
||||
|
||||
// Fallback to plan.json
|
||||
if (existsSync(planPath)) {
|
||||
try {
|
||||
const content = readFileSync(planPath, 'utf8');
|
||||
return JSON.parse(content);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
const content = await readFile(planPath, 'utf8');
|
||||
return JSON.parse(content);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -176,54 +178,54 @@ function loadPlanJson(sessionPath: string): unknown | null {
|
||||
* @param sessionPath - Session directory path
|
||||
* @returns Array of task objects
|
||||
*/
|
||||
function loadTaskJsons(sessionPath: string): NormalizedTask[] {
|
||||
async function loadTaskJsons(sessionPath: string): Promise<NormalizedTask[]> {
|
||||
let tasks: NormalizedTask[] = [];
|
||||
|
||||
// Method 1: Check .task/IMPL-*.json files
|
||||
const taskDir = join(sessionPath, '.task');
|
||||
if (existsSync(taskDir)) {
|
||||
try {
|
||||
const implTasks = readdirSync(taskDir)
|
||||
.filter(f => f.endsWith('.json') && (
|
||||
f.startsWith('IMPL-') ||
|
||||
f.startsWith('TASK-') ||
|
||||
f.startsWith('task-') ||
|
||||
f.startsWith('diagnosis-') ||
|
||||
/^T\d+\.json$/i.test(f)
|
||||
))
|
||||
.map(f => {
|
||||
const taskPath = join(taskDir, f);
|
||||
try {
|
||||
const content = readFileSync(taskPath, 'utf8');
|
||||
return normalizeTask(JSON.parse(content));
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
})
|
||||
.filter((t): t is NormalizedTask => t !== null);
|
||||
tasks = tasks.concat(implTasks);
|
||||
} catch {
|
||||
// Continue to other methods
|
||||
}
|
||||
try {
|
||||
const implFiles = (await readdir(taskDir))
|
||||
.filter((fileName) => fileName.endsWith('.json') && (
|
||||
fileName.startsWith('IMPL-') ||
|
||||
fileName.startsWith('TASK-') ||
|
||||
fileName.startsWith('task-') ||
|
||||
fileName.startsWith('diagnosis-') ||
|
||||
/^T\d+\.json$/i.test(fileName)
|
||||
));
|
||||
|
||||
const implTasks = (await Promise.all(
|
||||
implFiles.map(async (fileName) => {
|
||||
const taskPath = join(taskDir, fileName);
|
||||
try {
|
||||
const content = await readFile(taskPath, 'utf8');
|
||||
return normalizeTask(JSON.parse(content));
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}),
|
||||
))
|
||||
.filter((task): task is NormalizedTask => task !== null);
|
||||
|
||||
tasks = tasks.concat(implTasks);
|
||||
} catch {
|
||||
// Continue to other methods
|
||||
}
|
||||
|
||||
// Method 2: Check plan.json or fix-plan.json for embedded tasks array
|
||||
if (tasks.length === 0) {
|
||||
// Try fix-plan.json first (for lite-fix), then plan.json (for lite-plan)
|
||||
const fixPlanPath = join(sessionPath, 'fix-plan.json');
|
||||
const planPath = join(sessionPath, 'plan.json');
|
||||
const planFiles = [join(sessionPath, 'fix-plan.json'), join(sessionPath, 'plan.json')];
|
||||
|
||||
const planFile = existsSync(fixPlanPath) ? fixPlanPath :
|
||||
existsSync(planPath) ? planPath : null;
|
||||
|
||||
if (planFile) {
|
||||
for (const planFile of planFiles) {
|
||||
try {
|
||||
const plan = JSON.parse(readFileSync(planFile, 'utf8')) as { tasks?: unknown[] };
|
||||
const plan = JSON.parse(await readFile(planFile, 'utf8')) as { tasks?: unknown[] };
|
||||
if (Array.isArray(plan.tasks)) {
|
||||
tasks = plan.tasks.map(t => normalizeTask(t)).filter((t): t is NormalizedTask => t !== null);
|
||||
tasks = plan.tasks
|
||||
.map((task) => normalizeTask(task))
|
||||
.filter((task): task is NormalizedTask => task !== null);
|
||||
break;
|
||||
}
|
||||
} catch {
|
||||
// Continue to other methods
|
||||
// Continue to other plan files
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -231,23 +233,27 @@ function loadTaskJsons(sessionPath: string): NormalizedTask[] {
|
||||
// Method 3: Check for task-*.json and diagnosis-*.json files in session root
|
||||
if (tasks.length === 0) {
|
||||
try {
|
||||
const rootTasks = readdirSync(sessionPath)
|
||||
.filter(f => f.endsWith('.json') && (
|
||||
f.startsWith('task-') ||
|
||||
f.startsWith('TASK-') ||
|
||||
f.startsWith('diagnosis-') ||
|
||||
/^T\d+\.json$/i.test(f)
|
||||
))
|
||||
.map(f => {
|
||||
const taskPath = join(sessionPath, f);
|
||||
const rootFiles = (await readdir(sessionPath))
|
||||
.filter((fileName) => fileName.endsWith('.json') && (
|
||||
fileName.startsWith('task-') ||
|
||||
fileName.startsWith('TASK-') ||
|
||||
fileName.startsWith('diagnosis-') ||
|
||||
/^T\d+\.json$/i.test(fileName)
|
||||
));
|
||||
|
||||
const rootTasks = (await Promise.all(
|
||||
rootFiles.map(async (fileName) => {
|
||||
const taskPath = join(sessionPath, fileName);
|
||||
try {
|
||||
const content = readFileSync(taskPath, 'utf8');
|
||||
const content = await readFile(taskPath, 'utf8');
|
||||
return normalizeTask(JSON.parse(content));
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
})
|
||||
.filter((t): t is NormalizedTask => t !== null);
|
||||
}),
|
||||
))
|
||||
.filter((task): task is NormalizedTask => task !== null);
|
||||
|
||||
tasks = tasks.concat(rootTasks);
|
||||
} catch {
|
||||
// No tasks found
|
||||
@@ -333,10 +339,10 @@ function normalizeTask(task: unknown): NormalizedTask | null {
|
||||
* @param dirPath - Directory path
|
||||
* @returns ISO date string
|
||||
*/
|
||||
function getCreatedTime(dirPath: string): string {
|
||||
async function getCreatedTime(dirPath: string): Promise<string> {
|
||||
try {
|
||||
const stat = statSync(dirPath);
|
||||
return stat.birthtime.toISOString();
|
||||
const stats = await stat(dirPath);
|
||||
return stats.birthtime.toISOString();
|
||||
} catch {
|
||||
return new Date().toISOString();
|
||||
}
|
||||
@@ -366,28 +372,37 @@ function calculateProgress(tasks: NormalizedTask[]): Progress {
|
||||
* @param sessionId - Session ID
|
||||
* @returns Detailed task info
|
||||
*/
|
||||
export function getLiteTaskDetail(workflowDir: string, type: string, sessionId: string): LiteTaskDetail | null {
|
||||
export async function getLiteTaskDetail(workflowDir: string, type: string, sessionId: string): Promise<LiteTaskDetail | null> {
|
||||
const dir = type === 'lite-plan'
|
||||
? join(workflowDir, '.lite-plan', sessionId)
|
||||
: join(workflowDir, '.lite-fix', sessionId);
|
||||
|
||||
if (!existsSync(dir)) return null;
|
||||
try {
|
||||
const stats = await stat(dir);
|
||||
if (!stats.isDirectory()) return null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
|
||||
const [plan, tasks, explorations, clarifications, diagnoses] = await Promise.all([
|
||||
loadPlanJson(dir),
|
||||
loadTaskJsons(dir),
|
||||
loadExplorations(dir),
|
||||
loadClarifications(dir),
|
||||
type === 'lite-fix' ? loadDiagnoses(dir) : Promise.resolve(undefined),
|
||||
]);
|
||||
|
||||
const detail: LiteTaskDetail = {
|
||||
id: sessionId,
|
||||
type,
|
||||
path: dir,
|
||||
plan: loadPlanJson(dir),
|
||||
tasks: loadTaskJsons(dir),
|
||||
explorations: loadExplorations(dir),
|
||||
clarifications: loadClarifications(dir)
|
||||
plan,
|
||||
tasks,
|
||||
explorations,
|
||||
clarifications,
|
||||
diagnoses,
|
||||
};
|
||||
|
||||
// For lite-fix sessions, also load diagnoses
|
||||
if (type === 'lite-fix') {
|
||||
detail.diagnoses = loadDiagnoses(dir);
|
||||
}
|
||||
|
||||
return detail;
|
||||
}
|
||||
|
||||
@@ -396,12 +411,11 @@ export function getLiteTaskDetail(workflowDir: string, type: string, sessionId:
|
||||
* @param sessionPath - Session directory path
|
||||
* @returns Exploration results
|
||||
*/
|
||||
function loadExplorations(sessionPath: string): unknown[] {
|
||||
async function loadExplorations(sessionPath: string): Promise<unknown[]> {
|
||||
const explorePath = join(sessionPath, 'explorations.json');
|
||||
if (!existsSync(explorePath)) return [];
|
||||
|
||||
try {
|
||||
const content = readFileSync(explorePath, 'utf8');
|
||||
const content = await readFile(explorePath, 'utf8');
|
||||
return JSON.parse(content);
|
||||
} catch {
|
||||
return [];
|
||||
@@ -413,12 +427,11 @@ function loadExplorations(sessionPath: string): unknown[] {
|
||||
* @param sessionPath - Session directory path
|
||||
* @returns Clarification data
|
||||
*/
|
||||
function loadClarifications(sessionPath: string): unknown | null {
|
||||
async function loadClarifications(sessionPath: string): Promise<unknown | null> {
|
||||
const clarifyPath = join(sessionPath, 'clarifications.json');
|
||||
if (!existsSync(clarifyPath)) return null;
|
||||
|
||||
try {
|
||||
const content = readFileSync(clarifyPath, 'utf8');
|
||||
const content = await readFile(clarifyPath, 'utf8');
|
||||
return JSON.parse(content);
|
||||
} catch {
|
||||
return null;
|
||||
@@ -431,7 +444,7 @@ function loadClarifications(sessionPath: string): unknown | null {
|
||||
* @param sessionPath - Session directory path
|
||||
* @returns Diagnoses data with manifest and items
|
||||
*/
|
||||
function loadDiagnoses(sessionPath: string): Diagnoses {
|
||||
async function loadDiagnoses(sessionPath: string): Promise<Diagnoses> {
|
||||
const result: Diagnoses = {
|
||||
manifest: null,
|
||||
items: []
|
||||
@@ -439,32 +452,35 @@ function loadDiagnoses(sessionPath: string): Diagnoses {
|
||||
|
||||
// Try to load diagnoses-manifest.json first
|
||||
const manifestPath = join(sessionPath, 'diagnoses-manifest.json');
|
||||
if (existsSync(manifestPath)) {
|
||||
try {
|
||||
result.manifest = JSON.parse(readFileSync(manifestPath, 'utf8'));
|
||||
} catch {
|
||||
// Continue without manifest
|
||||
}
|
||||
try {
|
||||
result.manifest = JSON.parse(await readFile(manifestPath, 'utf8'));
|
||||
} catch {
|
||||
// Continue without manifest
|
||||
}
|
||||
|
||||
// Load all diagnosis-*.json files from session root
|
||||
try {
|
||||
const diagnosisFiles = readdirSync(sessionPath)
|
||||
.filter(f => f.startsWith('diagnosis-') && f.endsWith('.json'));
|
||||
const diagnosisFiles = (await readdir(sessionPath))
|
||||
.filter((fileName) => fileName.startsWith('diagnosis-') && fileName.endsWith('.json'));
|
||||
|
||||
for (const file of diagnosisFiles) {
|
||||
const filePath = join(sessionPath, file);
|
||||
try {
|
||||
const content = JSON.parse(readFileSync(filePath, 'utf8')) as Record<string, unknown>;
|
||||
result.items.push({
|
||||
id: file.replace('diagnosis-', '').replace('.json', ''),
|
||||
filename: file,
|
||||
...content
|
||||
});
|
||||
} catch {
|
||||
// Skip invalid files
|
||||
}
|
||||
}
|
||||
const items = (await Promise.all(
|
||||
diagnosisFiles.map(async (fileName) => {
|
||||
const filePath = join(sessionPath, fileName);
|
||||
try {
|
||||
const content = JSON.parse(await readFile(filePath, 'utf8')) as Record<string, unknown>;
|
||||
return {
|
||||
id: fileName.replace('diagnosis-', '').replace('.json', ''),
|
||||
filename: fileName,
|
||||
...content,
|
||||
} satisfies DiagnosisItem;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}),
|
||||
))
|
||||
.filter((item): item is DiagnosisItem => item !== null);
|
||||
|
||||
result.items.push(...items);
|
||||
} catch {
|
||||
// Return empty items if directory read fails
|
||||
}
|
||||
|
||||
98
ccw/src/core/routes/auth-routes.ts
Normal file
98
ccw/src/core/routes/auth-routes.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { randomBytes } from 'crypto';
|
||||
import { getCsrfTokenManager } from '../auth/csrf-manager.js';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
}
|
||||
|
||||
function getHeaderValue(header: string | string[] | undefined): string | null {
|
||||
if (!header) return null;
|
||||
if (Array.isArray(header)) return header[0] ?? null;
|
||||
return header;
|
||||
}
|
||||
|
||||
function parseCookieHeader(cookieHeader: string | null | undefined): Record<string, string> {
|
||||
if (!cookieHeader) return {};
|
||||
|
||||
const cookies: Record<string, string> = {};
|
||||
for (const part of cookieHeader.split(';')) {
|
||||
const [rawName, ...rawValueParts] = part.trim().split('=');
|
||||
if (!rawName) continue;
|
||||
const rawValue = rawValueParts.join('=');
|
||||
try {
|
||||
cookies[rawName] = decodeURIComponent(rawValue);
|
||||
} catch {
|
||||
cookies[rawName] = rawValue;
|
||||
}
|
||||
}
|
||||
return cookies;
|
||||
}
|
||||
|
||||
function appendSetCookie(res: ServerResponse, cookie: string): void {
|
||||
const existing = res.getHeader('Set-Cookie');
|
||||
if (!existing) {
|
||||
res.setHeader('Set-Cookie', cookie);
|
||||
return;
|
||||
}
|
||||
|
||||
if (Array.isArray(existing)) {
|
||||
res.setHeader('Set-Cookie', [...existing, cookie]);
|
||||
return;
|
||||
}
|
||||
|
||||
res.setHeader('Set-Cookie', [String(existing), cookie]);
|
||||
}
|
||||
|
||||
function getOrCreateSessionId(req: IncomingMessage, res: ServerResponse): string {
|
||||
const cookies = parseCookieHeader(getHeaderValue(req.headers.cookie));
|
||||
const existing = cookies.ccw_session_id;
|
||||
if (existing) return existing;
|
||||
|
||||
const created = randomBytes(16).toString('hex');
|
||||
const attributes = [
|
||||
`ccw_session_id=${encodeURIComponent(created)}`,
|
||||
'Path=/',
|
||||
'HttpOnly',
|
||||
'SameSite=Strict',
|
||||
`Max-Age=${24 * 60 * 60}`,
|
||||
];
|
||||
appendSetCookie(res, attributes.join('; '));
|
||||
return created;
|
||||
}
|
||||
|
||||
function setCsrfCookie(res: ServerResponse, token: string, maxAgeSeconds: number): void {
|
||||
const attributes = [
|
||||
`XSRF-TOKEN=${encodeURIComponent(token)}`,
|
||||
'Path=/',
|
||||
'HttpOnly',
|
||||
'SameSite=Strict',
|
||||
`Max-Age=${maxAgeSeconds}`,
|
||||
];
|
||||
appendSetCookie(res, attributes.join('; '));
|
||||
}
|
||||
|
||||
export async function handleAuthRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
const { pathname, req, res } = ctx;
|
||||
|
||||
if (pathname === '/api/csrf-token' && req.method === 'GET') {
|
||||
const sessionId = getOrCreateSessionId(req, res);
|
||||
const tokenManager = getCsrfTokenManager();
|
||||
const csrfToken = tokenManager.generateToken(sessionId);
|
||||
|
||||
res.setHeader('X-CSRF-Token', csrfToken);
|
||||
setCsrfCookie(res, csrfToken, 15 * 60);
|
||||
res.writeHead(200, { 'Content-Type': 'application/json; charset=utf-8' });
|
||||
res.end(JSON.stringify({ csrfToken }));
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -1,21 +1,10 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* CCW Routes Module
|
||||
* Handles all CCW-related API endpoints
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { getAllManifests } from '../manifest.js';
|
||||
import { listTools } from '../../tools/index.js';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
}
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
/**
|
||||
* Handle CCW routes
|
||||
@@ -43,13 +32,14 @@ export async function handleCcwRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: CCW Upgrade
|
||||
if (pathname === '/api/ccw/upgrade' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { path: installPath } = body;
|
||||
const { path: installPath } = body as { path?: unknown };
|
||||
const resolvedInstallPath = typeof installPath === 'string' && installPath.trim().length > 0 ? installPath : undefined;
|
||||
|
||||
try {
|
||||
const { spawn } = await import('child_process');
|
||||
|
||||
// Run ccw upgrade command
|
||||
const args = installPath ? ['upgrade', '--all'] : ['upgrade', '--all'];
|
||||
const args = resolvedInstallPath ? ['upgrade', '--all'] : ['upgrade', '--all'];
|
||||
const upgradeProcess = spawn('ccw', args, {
|
||||
shell: true,
|
||||
stdio: ['ignore', 'pipe', 'pipe']
|
||||
@@ -58,16 +48,16 @@ export async function handleCcwRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
let stdout = '';
|
||||
let stderr = '';
|
||||
|
||||
upgradeProcess.stdout.on('data', (data) => {
|
||||
upgradeProcess.stdout?.on('data', (data: Buffer) => {
|
||||
stdout += data.toString();
|
||||
});
|
||||
|
||||
upgradeProcess.stderr.on('data', (data) => {
|
||||
upgradeProcess.stderr?.on('data', (data: Buffer) => {
|
||||
stderr += data.toString();
|
||||
});
|
||||
|
||||
return new Promise((resolve) => {
|
||||
upgradeProcess.on('close', (code) => {
|
||||
upgradeProcess.on('close', (code: number | null) => {
|
||||
if (code === 0) {
|
||||
resolve({ success: true, message: 'Upgrade completed', output: stdout });
|
||||
} else {
|
||||
@@ -75,7 +65,7 @@ export async function handleCcwRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
}
|
||||
});
|
||||
|
||||
upgradeProcess.on('error', (err) => {
|
||||
upgradeProcess.on('error', (err: Error) => {
|
||||
resolve({ success: false, error: err.message, status: 500 });
|
||||
});
|
||||
|
||||
@@ -85,8 +75,8 @@ export async function handleCcwRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
resolve({ success: false, error: 'Upgrade timed out', status: 504 });
|
||||
}, 120000);
|
||||
});
|
||||
} catch (err) {
|
||||
return { success: false, error: err.message, status: 500 };
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
|
||||
@@ -1,22 +1,11 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* CLAUDE.md Routes Module
|
||||
* Handles all CLAUDE.md memory rules management endpoints
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { readFileSync, writeFileSync, existsSync, readdirSync, statSync, unlinkSync, mkdirSync } from 'fs';
|
||||
import { join, relative } from 'path';
|
||||
import { dirname, join, relative } from 'path';
|
||||
import { homedir } from 'os';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
}
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
interface ClaudeFile {
|
||||
id: string;
|
||||
@@ -498,7 +487,7 @@ function createNewClaudeFile(level: 'user' | 'project' | 'module', template: str
|
||||
}
|
||||
|
||||
// Ensure directory exists
|
||||
const dir = filePath.substring(0, filePath.lastIndexOf('/') || filePath.lastIndexOf('\\'));
|
||||
const dir = dirname(filePath);
|
||||
if (!existsSync(dir)) {
|
||||
mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
@@ -616,7 +605,7 @@ export async function handleClaudeRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
if (!result.success) {
|
||||
return {
|
||||
error: 'CLI execution failed',
|
||||
details: result.execution?.error || 'No output received',
|
||||
details: result.stderr || result.execution?.output?.stderr || 'No output received',
|
||||
status: 500
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* CLI Routes Module
|
||||
* Handles all CLI-related API endpoints
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import {
|
||||
getCliToolsStatus,
|
||||
getCliToolsFullStatus,
|
||||
@@ -44,16 +42,7 @@ import {
|
||||
updateCodeIndexMcp,
|
||||
getCodeIndexMcp
|
||||
} from '../../tools/claude-cli-tools.js';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
}
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
// ========== Active Executions State ==========
|
||||
// Stores running CLI executions for state recovery when view is opened/refreshed
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
37
ccw/src/core/routes/codexlens/README.md
Normal file
37
ccw/src/core/routes/codexlens/README.md
Normal file
@@ -0,0 +1,37 @@
|
||||
# CodexLens Routes
|
||||
|
||||
CodexLens-related HTTP endpoints are handled by `ccw/src/core/routes/codexlens-routes.ts`, which delegates to handler modules in this directory. Each handler returns `true` when it handles the current request.
|
||||
|
||||
## File Map
|
||||
|
||||
- `ccw/src/core/routes/codexlens/utils.ts` – shared helpers (ANSI stripping + robust JSON extraction from CLI output).
|
||||
- `ccw/src/core/routes/codexlens/index-handlers.ts` – index/project management endpoints:
|
||||
- `GET /api/codexlens/indexes`
|
||||
- `POST /api/codexlens/clean`
|
||||
- `POST /api/codexlens/init`
|
||||
- `POST /api/codexlens/cancel`
|
||||
- `GET /api/codexlens/indexing-status`
|
||||
- `ccw/src/core/routes/codexlens/config-handlers.ts` – install/config/environment endpoints:
|
||||
- `GET /api/codexlens/status`
|
||||
- `GET /api/codexlens/dashboard-init`
|
||||
- `POST /api/codexlens/bootstrap`
|
||||
- `POST /api/codexlens/uninstall`
|
||||
- `GET /api/codexlens/config`
|
||||
- `POST /api/codexlens/config`
|
||||
- GPU: `GET /api/codexlens/gpu/detect`, `GET /api/codexlens/gpu/list`, `POST /api/codexlens/gpu/select`, `POST /api/codexlens/gpu/reset`
|
||||
- Models: `GET /api/codexlens/models`, `POST /api/codexlens/models/download`, `POST /api/codexlens/models/delete`, `GET /api/codexlens/models/info`
|
||||
- Env: `GET /api/codexlens/env`, `POST /api/codexlens/env`
|
||||
- `ccw/src/core/routes/codexlens/semantic-handlers.ts` – semantic search + reranker + SPLADE endpoints:
|
||||
- Semantic: `GET /api/codexlens/semantic/status`, `GET /api/codexlens/semantic/metadata`, `POST /api/codexlens/semantic/install`
|
||||
- Search: `GET /api/codexlens/search`, `GET /api/codexlens/search_files`, `GET /api/codexlens/symbol`, `POST /api/codexlens/enhance`
|
||||
- Reranker: `GET /api/codexlens/reranker/config`, `POST /api/codexlens/reranker/config`, `GET /api/codexlens/reranker/models`, `POST /api/codexlens/reranker/models/download`, `POST /api/codexlens/reranker/models/delete`, `GET /api/codexlens/reranker/models/info`
|
||||
- SPLADE: `GET /api/codexlens/splade/status`, `POST /api/codexlens/splade/install`, `GET /api/codexlens/splade/index-status`, `POST /api/codexlens/splade/rebuild`
|
||||
- `ccw/src/core/routes/codexlens/watcher-handlers.ts` – file watcher endpoints:
|
||||
- `GET /api/codexlens/watch/status`
|
||||
- `POST /api/codexlens/watch/start`
|
||||
- `POST /api/codexlens/watch/stop`
|
||||
- Also exports `stopWatcherForUninstall()` used during uninstall flow.
|
||||
|
||||
## Notes
|
||||
|
||||
- CodexLens CLI output may include logging + ANSI escapes even with `--json`; handlers use `extractJSON()` from `utils.ts` to parse reliably.
|
||||
913
ccw/src/core/routes/codexlens/config-handlers.ts
Normal file
913
ccw/src/core/routes/codexlens/config-handlers.ts
Normal file
@@ -0,0 +1,913 @@
|
||||
/**
|
||||
* CodexLens configuration + environment handlers.
|
||||
*/
|
||||
|
||||
import {
|
||||
bootstrapVenv,
|
||||
cancelIndexing,
|
||||
checkSemanticStatus,
|
||||
checkVenvStatus,
|
||||
detectGpuSupport,
|
||||
executeCodexLens,
|
||||
isIndexingInProgress,
|
||||
uninstallCodexLens,
|
||||
} from '../../../tools/codex-lens.js';
|
||||
import type { RouteContext } from '../types.js';
|
||||
import { EXEC_TIMEOUTS } from '../../../utils/exec-constants.js';
|
||||
import { extractJSON } from './utils.js';
|
||||
import { stopWatcherForUninstall } from './watcher-handlers.js';
|
||||
|
||||
export async function handleCodexLensConfigRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
const { pathname, url, req, res, initialPath, handlePostRequest, broadcastToClients } = ctx;
|
||||
|
||||
// API: CodexLens Status
|
||||
if (pathname === '/api/codexlens/status') {
|
||||
const status = await checkVenvStatus();
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(status));
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Dashboard Init - Aggregated endpoint for page initialization
|
||||
if (pathname === '/api/codexlens/dashboard-init') {
|
||||
try {
|
||||
const venvStatus = await checkVenvStatus();
|
||||
|
||||
if (!venvStatus.ready) {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
installed: false,
|
||||
status: venvStatus,
|
||||
config: { index_dir: '~/.codexlens/indexes', index_count: 0 },
|
||||
semantic: { available: false }
|
||||
}));
|
||||
return true;
|
||||
}
|
||||
|
||||
// Parallel fetch all initialization data
|
||||
const [configResult, statusResult, semanticStatus] = await Promise.all([
|
||||
executeCodexLens(['config', '--json']),
|
||||
executeCodexLens(['status', '--json']),
|
||||
checkSemanticStatus()
|
||||
]);
|
||||
|
||||
// Parse config
|
||||
let config = { index_dir: '~/.codexlens/indexes', index_count: 0 };
|
||||
if (configResult.success) {
|
||||
try {
|
||||
const configData = extractJSON(configResult.output ?? '');
|
||||
if (configData.success && configData.result) {
|
||||
config.index_dir = configData.result.index_dir || configData.result.index_root || config.index_dir;
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
console.error('[CodexLens] Failed to parse config for dashboard init:', e instanceof Error ? e.message : String(e));
|
||||
}
|
||||
}
|
||||
|
||||
// Parse status
|
||||
let statusData: any = {};
|
||||
if (statusResult.success) {
|
||||
try {
|
||||
const status = extractJSON(statusResult.output ?? '');
|
||||
if (status.success && status.result) {
|
||||
config.index_count = status.result.projects_count || 0;
|
||||
statusData = status.result;
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
console.error('[CodexLens] Failed to parse status for dashboard init:', e instanceof Error ? e.message : String(e));
|
||||
}
|
||||
}
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
installed: true,
|
||||
status: venvStatus,
|
||||
config,
|
||||
semantic: semanticStatus,
|
||||
statusData
|
||||
}));
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Bootstrap (Install)
|
||||
if (pathname === '/api/codexlens/bootstrap' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async () => {
|
||||
try {
|
||||
const result = await bootstrapVenv();
|
||||
if (result.success) {
|
||||
const status = await checkVenvStatus();
|
||||
broadcastToClients({
|
||||
type: 'CODEXLENS_INSTALLED',
|
||||
payload: { version: status.version, timestamp: new Date().toISOString() }
|
||||
});
|
||||
return { success: true, message: 'CodexLens installed successfully', version: status.version };
|
||||
} else {
|
||||
return { success: false, error: result.error, status: 500 };
|
||||
}
|
||||
} catch (err) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Uninstall
|
||||
if (pathname === '/api/codexlens/uninstall' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async () => {
|
||||
try {
|
||||
// Stop watcher if running (to release file handles)
|
||||
await stopWatcherForUninstall();
|
||||
|
||||
if (isIndexingInProgress()) {
|
||||
console.log('[CodexLens] Cancelling indexing before uninstall...');
|
||||
try {
|
||||
cancelIndexing();
|
||||
} catch {
|
||||
// Ignore errors
|
||||
}
|
||||
}
|
||||
|
||||
// Wait a moment for processes to fully exit and release handles
|
||||
await new Promise(resolve => setTimeout(resolve, 1000));
|
||||
|
||||
const result = await uninstallCodexLens();
|
||||
if (result.success) {
|
||||
broadcastToClients({
|
||||
type: 'CODEXLENS_UNINSTALLED',
|
||||
payload: { timestamp: new Date().toISOString() }
|
||||
});
|
||||
return { success: true, message: 'CodexLens uninstalled successfully' };
|
||||
} else {
|
||||
return { success: false, error: result.error, status: 500 };
|
||||
}
|
||||
} catch (err) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Config - GET (Get current configuration with index count)
|
||||
if (pathname === '/api/codexlens/config' && req.method === 'GET') {
|
||||
try {
|
||||
const venvStatus = await checkVenvStatus();
|
||||
let responseData = { index_dir: '~/.codexlens/indexes', index_count: 0, api_max_workers: 4, api_batch_size: 8 };
|
||||
|
||||
// If not installed, return default config without executing CodexLens
|
||||
if (!venvStatus.ready) {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(responseData));
|
||||
return true;
|
||||
}
|
||||
|
||||
const [configResult, statusResult] = await Promise.all([
|
||||
executeCodexLens(['config', '--json']),
|
||||
executeCodexLens(['status', '--json'])
|
||||
]);
|
||||
|
||||
// Parse config (extract JSON from output that may contain log messages)
|
||||
if (configResult.success) {
|
||||
try {
|
||||
const config = extractJSON(configResult.output ?? '');
|
||||
if (config.success && config.result) {
|
||||
// CLI returns index_dir (not index_root)
|
||||
responseData.index_dir = config.result.index_dir || config.result.index_root || responseData.index_dir;
|
||||
// Extract API settings
|
||||
if (config.result.api_max_workers !== undefined) {
|
||||
responseData.api_max_workers = config.result.api_max_workers;
|
||||
}
|
||||
if (config.result.api_batch_size !== undefined) {
|
||||
responseData.api_batch_size = config.result.api_batch_size;
|
||||
}
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
console.error('[CodexLens] Failed to parse config:', e instanceof Error ? e.message : String(e));
|
||||
console.error('[CodexLens] Config output:', (configResult.output ?? '').substring(0, 200));
|
||||
}
|
||||
}
|
||||
|
||||
// Parse status to get index_count (projects_count)
|
||||
if (statusResult.success) {
|
||||
try {
|
||||
const status = extractJSON(statusResult.output ?? '');
|
||||
if (status.success && status.result) {
|
||||
responseData.index_count = status.result.projects_count || 0;
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
console.error('[CodexLens] Failed to parse status:', e instanceof Error ? e.message : String(e));
|
||||
console.error('[CodexLens] Status output:', (statusResult.output ?? '').substring(0, 200));
|
||||
}
|
||||
}
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(responseData));
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Config - POST (Set configuration)
|
||||
if (pathname === '/api/codexlens/config' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body: unknown) => {
|
||||
const { index_dir, api_max_workers, api_batch_size } = body as {
|
||||
index_dir?: unknown;
|
||||
api_max_workers?: unknown;
|
||||
api_batch_size?: unknown;
|
||||
};
|
||||
|
||||
if (!index_dir) {
|
||||
return { success: false, error: 'index_dir is required', status: 400 };
|
||||
}
|
||||
|
||||
// Validate index_dir path
|
||||
const indexDirStr = String(index_dir).trim();
|
||||
|
||||
// Check for dangerous patterns
|
||||
if (indexDirStr.includes('\0')) {
|
||||
return { success: false, error: 'Invalid path: contains null bytes', status: 400 };
|
||||
}
|
||||
|
||||
// Prevent system root paths and their subdirectories (Windows and Unix)
|
||||
const dangerousPaths = ['/', 'C:\\', 'C:/', '/etc', '/usr', '/bin', '/sys', '/proc', '/var',
|
||||
'C:\\Windows', 'C:\\Program Files', 'C:\\Program Files (x86)', 'C:\\System32'];
|
||||
const normalizedPath = indexDirStr.replace(/\\/g, '/').toLowerCase();
|
||||
for (const dangerous of dangerousPaths) {
|
||||
const dangerousLower = dangerous.replace(/\\/g, '/').toLowerCase();
|
||||
// Block exact match OR any subdirectory (using startsWith)
|
||||
if (normalizedPath === dangerousLower ||
|
||||
normalizedPath === dangerousLower + '/' ||
|
||||
normalizedPath.startsWith(dangerousLower + '/')) {
|
||||
return { success: false, error: 'Invalid path: cannot use system directories or their subdirectories', status: 400 };
|
||||
}
|
||||
}
|
||||
|
||||
// Additional check: prevent path traversal attempts
|
||||
if (normalizedPath.includes('../') || normalizedPath.includes('/..')) {
|
||||
return { success: false, error: 'Invalid path: path traversal not allowed', status: 400 };
|
||||
}
|
||||
|
||||
// Validate api settings
|
||||
if (api_max_workers !== undefined) {
|
||||
const workers = Number(api_max_workers);
|
||||
if (isNaN(workers) || workers < 1 || workers > 32) {
|
||||
return { success: false, error: 'api_max_workers must be between 1 and 32', status: 400 };
|
||||
}
|
||||
}
|
||||
if (api_batch_size !== undefined) {
|
||||
const batch = Number(api_batch_size);
|
||||
if (isNaN(batch) || batch < 1 || batch > 64) {
|
||||
return { success: false, error: 'api_batch_size must be between 1 and 64', status: 400 };
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// Set index_dir
|
||||
const result = await executeCodexLens(['config', 'set', 'index_dir', indexDirStr, '--json']);
|
||||
if (!result.success) {
|
||||
return { success: false, error: result.error || 'Failed to update index_dir', status: 500 };
|
||||
}
|
||||
|
||||
// Set API settings if provided
|
||||
if (api_max_workers !== undefined) {
|
||||
await executeCodexLens(['config', 'set', 'api_max_workers', String(api_max_workers), '--json']);
|
||||
}
|
||||
if (api_batch_size !== undefined) {
|
||||
await executeCodexLens(['config', 'set', 'api_batch_size', String(api_batch_size), '--json']);
|
||||
}
|
||||
|
||||
return { success: true, message: 'Configuration updated successfully' };
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: Detect GPU support for semantic search
|
||||
if (pathname === '/api/codexlens/gpu/detect' && req.method === 'GET') {
|
||||
try {
|
||||
const gpuInfo = await detectGpuSupport();
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, ...gpuInfo }));
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: List available GPU devices for selection
|
||||
if (pathname === '/api/codexlens/gpu/list' && req.method === 'GET') {
|
||||
try {
|
||||
// Try CodexLens gpu-list first if available
|
||||
const venvStatus = await checkVenvStatus();
|
||||
if (venvStatus.ready) {
|
||||
const result = await executeCodexLens(['gpu-list', '--json']);
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
if (parsed.devices && parsed.devices.length > 0) {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(parsed));
|
||||
return true;
|
||||
}
|
||||
} catch {
|
||||
// Fall through to system detection
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: Use system commands to detect GPUs
|
||||
const devices: Array<{ name: string; type: string; index: number }> = [];
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
// Windows: Use WMIC to get GPU info
|
||||
try {
|
||||
const { execSync } = await import('child_process');
|
||||
const wmicOutput = execSync('wmic path win32_VideoController get name', {
|
||||
encoding: 'utf-8',
|
||||
timeout: EXEC_TIMEOUTS.SYSTEM_INFO,
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
});
|
||||
|
||||
const lines = wmicOutput.split('\n')
|
||||
.map(line => line.trim())
|
||||
.filter(line => line && line !== 'Name');
|
||||
|
||||
lines.forEach((name, index) => {
|
||||
if (name) {
|
||||
const isIntegrated = name.toLowerCase().includes('intel') ||
|
||||
name.toLowerCase().includes('integrated');
|
||||
devices.push({
|
||||
name: name,
|
||||
type: isIntegrated ? 'integrated' : 'discrete',
|
||||
index: index
|
||||
});
|
||||
}
|
||||
});
|
||||
} catch (e) {
|
||||
console.warn('[CodexLens] WMIC GPU detection failed:', (e as Error).message);
|
||||
}
|
||||
} else {
|
||||
// Linux/Mac: Try nvidia-smi for NVIDIA GPUs
|
||||
try {
|
||||
const { execSync } = await import('child_process');
|
||||
const nvidiaOutput = execSync('nvidia-smi --query-gpu=name --format=csv,noheader', {
|
||||
encoding: 'utf-8',
|
||||
timeout: EXEC_TIMEOUTS.SYSTEM_INFO,
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
});
|
||||
|
||||
const lines = nvidiaOutput.split('\n').filter(line => line.trim());
|
||||
lines.forEach((name, index) => {
|
||||
devices.push({
|
||||
name: name.trim(),
|
||||
type: 'discrete',
|
||||
index: index
|
||||
});
|
||||
});
|
||||
} catch {
|
||||
// NVIDIA not available, that's fine
|
||||
}
|
||||
}
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, devices: devices, selected_device_id: null }));
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: Select GPU device for embedding
|
||||
if (pathname === '/api/codexlens/gpu/select' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { device_id } = body as { device_id?: unknown };
|
||||
const resolvedDeviceId = typeof device_id === 'string' || typeof device_id === 'number' ? device_id : undefined;
|
||||
|
||||
if (resolvedDeviceId === undefined) {
|
||||
return { success: false, error: 'device_id is required', status: 400 };
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await executeCodexLens(['gpu-select', String(resolvedDeviceId), '--json']);
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
return parsed;
|
||||
} catch {
|
||||
return { success: true, message: 'GPU selected', output: result.output };
|
||||
}
|
||||
} else {
|
||||
return { success: false, error: result.error, status: 500 };
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: Reset GPU selection to auto-detection
|
||||
if (pathname === '/api/codexlens/gpu/reset' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async () => {
|
||||
try {
|
||||
const result = await executeCodexLens(['gpu-reset', '--json']);
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
return parsed;
|
||||
} catch {
|
||||
return { success: true, message: 'GPU selection reset', output: result.output };
|
||||
}
|
||||
} else {
|
||||
return { success: false, error: result.error, status: 500 };
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Model List (list available embedding models)
|
||||
if (pathname === '/api/codexlens/models' && req.method === 'GET') {
|
||||
try {
|
||||
// Check if CodexLens is installed first (without auto-installing)
|
||||
const venvStatus = await checkVenvStatus();
|
||||
if (!venvStatus.ready) {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: 'CodexLens not installed' }));
|
||||
return true;
|
||||
}
|
||||
const result = await executeCodexLens(['model-list', '--json']);
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(parsed));
|
||||
} catch {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, result: { models: [] }, output: result.output }));
|
||||
}
|
||||
} else {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: result.error }));
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Model Download (download embedding model by profile)
|
||||
if (pathname === '/api/codexlens/models/download' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { profile } = body as { profile?: unknown };
|
||||
const resolvedProfile = typeof profile === 'string' && profile.trim().length > 0 ? profile.trim() : undefined;
|
||||
|
||||
if (!resolvedProfile) {
|
||||
return { success: false, error: 'profile is required', status: 400 };
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await executeCodexLens(['model-download', resolvedProfile, '--json'], { timeout: 600000 }); // 10 min for download
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
return { success: true, ...parsed };
|
||||
} catch {
|
||||
return { success: true, output: result.output };
|
||||
}
|
||||
} else {
|
||||
return { success: false, error: result.error, status: 500 };
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Model Delete (delete embedding model by profile)
|
||||
if (pathname === '/api/codexlens/models/delete' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { profile } = body as { profile?: unknown };
|
||||
const resolvedProfile = typeof profile === 'string' && profile.trim().length > 0 ? profile.trim() : undefined;
|
||||
|
||||
if (!resolvedProfile) {
|
||||
return { success: false, error: 'profile is required', status: 400 };
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await executeCodexLens(['model-delete', resolvedProfile, '--json']);
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
return { success: true, ...parsed };
|
||||
} catch {
|
||||
return { success: true, output: result.output };
|
||||
}
|
||||
} else {
|
||||
return { success: false, error: result.error, status: 500 };
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Model Info (get model info by profile)
|
||||
if (pathname === '/api/codexlens/models/info' && req.method === 'GET') {
|
||||
const profile = url.searchParams.get('profile');
|
||||
|
||||
if (!profile) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: 'profile parameter is required' }));
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await executeCodexLens(['model-info', profile, '--json']);
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(parsed));
|
||||
} catch {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: 'Failed to parse response' }));
|
||||
}
|
||||
} else {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: result.error }));
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// ENV FILE MANAGEMENT ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
// API: Get global env file content
|
||||
if (pathname === '/api/codexlens/env' && req.method === 'GET') {
|
||||
try {
|
||||
const { homedir } = await import('os');
|
||||
const { join } = await import('path');
|
||||
const { readFile } = await import('fs/promises');
|
||||
|
||||
const envPath = join(homedir(), '.codexlens', '.env');
|
||||
let content = '';
|
||||
try {
|
||||
content = await readFile(envPath, 'utf-8');
|
||||
} catch {
|
||||
// File doesn't exist, return empty
|
||||
}
|
||||
|
||||
// Parse env file into key-value pairs (robust parsing)
|
||||
const envVars: Record<string, string> = {};
|
||||
const lines = content.split('\n');
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim();
|
||||
// Skip empty lines and comments
|
||||
if (!trimmed || trimmed.startsWith('#')) continue;
|
||||
|
||||
// Find first = that's part of key=value (not in a quote)
|
||||
const eqIndex = trimmed.indexOf('=');
|
||||
if (eqIndex <= 0) continue;
|
||||
|
||||
const key = trimmed.substring(0, eqIndex).trim();
|
||||
// Validate key format (alphanumeric + underscore)
|
||||
if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) continue;
|
||||
|
||||
let value = trimmed.substring(eqIndex + 1);
|
||||
|
||||
// Handle quoted values (preserves = inside quotes)
|
||||
if (value.startsWith('"')) {
|
||||
// Find matching closing quote (handle escaped quotes)
|
||||
let end = 1;
|
||||
while (end < value.length) {
|
||||
if (value[end] === '"' && value[end - 1] !== '\\') break;
|
||||
end++;
|
||||
}
|
||||
value = value.substring(1, end).replace(/\\"/g, '"');
|
||||
} else if (value.startsWith("'")) {
|
||||
// Single quotes don't support escaping
|
||||
const end = value.indexOf("'", 1);
|
||||
value = end > 0 ? value.substring(1, end) : value.substring(1);
|
||||
} else {
|
||||
// Unquoted: trim and take until comment or end
|
||||
const commentIndex = value.indexOf(' #');
|
||||
if (commentIndex > 0) {
|
||||
value = value.substring(0, commentIndex);
|
||||
}
|
||||
value = value.trim();
|
||||
}
|
||||
|
||||
envVars[key] = value;
|
||||
}
|
||||
|
||||
// Also read settings.json for current configuration
|
||||
const settingsPath = join(homedir(), '.codexlens', 'settings.json');
|
||||
let settings: Record<string, any> = {};
|
||||
try {
|
||||
const settingsContent = await readFile(settingsPath, 'utf-8');
|
||||
settings = JSON.parse(settingsContent);
|
||||
} catch {
|
||||
// Settings file doesn't exist or is invalid, use empty
|
||||
}
|
||||
|
||||
// Map settings to env var format for defaults
|
||||
const settingsDefaults: Record<string, string> = {};
|
||||
|
||||
// Embedding settings
|
||||
if (settings.embedding?.backend) {
|
||||
settingsDefaults['CODEXLENS_EMBEDDING_BACKEND'] = settings.embedding.backend;
|
||||
}
|
||||
if (settings.embedding?.model) {
|
||||
settingsDefaults['CODEXLENS_EMBEDDING_MODEL'] = settings.embedding.model;
|
||||
settingsDefaults['LITELLM_EMBEDDING_MODEL'] = settings.embedding.model;
|
||||
}
|
||||
if (settings.embedding?.use_gpu !== undefined) {
|
||||
settingsDefaults['CODEXLENS_USE_GPU'] = String(settings.embedding.use_gpu);
|
||||
}
|
||||
if (settings.embedding?.strategy) {
|
||||
settingsDefaults['CODEXLENS_EMBEDDING_STRATEGY'] = settings.embedding.strategy;
|
||||
}
|
||||
if (settings.embedding?.cooldown !== undefined) {
|
||||
settingsDefaults['CODEXLENS_EMBEDDING_COOLDOWN'] = String(settings.embedding.cooldown);
|
||||
}
|
||||
|
||||
// Reranker settings
|
||||
if (settings.reranker?.backend) {
|
||||
settingsDefaults['CODEXLENS_RERANKER_BACKEND'] = settings.reranker.backend;
|
||||
}
|
||||
if (settings.reranker?.model) {
|
||||
settingsDefaults['CODEXLENS_RERANKER_MODEL'] = settings.reranker.model;
|
||||
settingsDefaults['LITELLM_RERANKER_MODEL'] = settings.reranker.model;
|
||||
}
|
||||
if (settings.reranker?.enabled !== undefined) {
|
||||
settingsDefaults['CODEXLENS_RERANKER_ENABLED'] = String(settings.reranker.enabled);
|
||||
}
|
||||
if (settings.reranker?.top_k !== undefined) {
|
||||
settingsDefaults['CODEXLENS_RERANKER_TOP_K'] = String(settings.reranker.top_k);
|
||||
}
|
||||
|
||||
// API/Concurrency settings
|
||||
if (settings.api?.max_workers !== undefined) {
|
||||
settingsDefaults['CODEXLENS_API_MAX_WORKERS'] = String(settings.api.max_workers);
|
||||
}
|
||||
if (settings.api?.batch_size !== undefined) {
|
||||
settingsDefaults['CODEXLENS_API_BATCH_SIZE'] = String(settings.api.batch_size);
|
||||
}
|
||||
|
||||
// Cascade search settings
|
||||
if (settings.cascade?.strategy) {
|
||||
settingsDefaults['CODEXLENS_CASCADE_STRATEGY'] = settings.cascade.strategy;
|
||||
}
|
||||
if (settings.cascade?.coarse_k !== undefined) {
|
||||
settingsDefaults['CODEXLENS_CASCADE_COARSE_K'] = String(settings.cascade.coarse_k);
|
||||
}
|
||||
if (settings.cascade?.fine_k !== undefined) {
|
||||
settingsDefaults['CODEXLENS_CASCADE_FINE_K'] = String(settings.cascade.fine_k);
|
||||
}
|
||||
|
||||
// LLM settings
|
||||
if (settings.llm?.enabled !== undefined) {
|
||||
settingsDefaults['CODEXLENS_LLM_ENABLED'] = String(settings.llm.enabled);
|
||||
}
|
||||
if (settings.llm?.batch_size !== undefined) {
|
||||
settingsDefaults['CODEXLENS_LLM_BATCH_SIZE'] = String(settings.llm.batch_size);
|
||||
}
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
success: true,
|
||||
path: envPath,
|
||||
env: envVars,
|
||||
raw: content,
|
||||
settings: settingsDefaults
|
||||
}));
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: Save global env file content (merge mode - preserves existing values)
|
||||
if (pathname === '/api/codexlens/env' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { env } = body as { env: Record<string, string> };
|
||||
|
||||
if (!env || typeof env !== 'object') {
|
||||
return { success: false, error: 'env object is required', status: 400 };
|
||||
}
|
||||
|
||||
try {
|
||||
const { homedir } = await import('os');
|
||||
const { join, dirname } = await import('path');
|
||||
const { writeFile, mkdir, readFile } = await import('fs/promises');
|
||||
|
||||
const envPath = join(homedir(), '.codexlens', '.env');
|
||||
await mkdir(dirname(envPath), { recursive: true });
|
||||
|
||||
// Read existing env file to preserve custom variables
|
||||
let existingEnv: Record<string, string> = {};
|
||||
let existingComments: string[] = [];
|
||||
try {
|
||||
const content = await readFile(envPath, 'utf-8');
|
||||
const lines = content.split('\n');
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim();
|
||||
// Preserve comment lines that aren't our headers
|
||||
if (trimmed.startsWith('#') && !trimmed.includes('Managed by CCW')) {
|
||||
if (!trimmed.includes('Reranker API') && !trimmed.includes('Embedding API') &&
|
||||
!trimmed.includes('LiteLLM Config') && !trimmed.includes('CodexLens Settings') &&
|
||||
!trimmed.includes('Other Settings') && !trimmed.includes('CodexLens Environment')) {
|
||||
existingComments.push(line);
|
||||
}
|
||||
}
|
||||
if (!trimmed || trimmed.startsWith('#')) continue;
|
||||
|
||||
// Robust parsing (same as GET handler)
|
||||
const eqIndex = trimmed.indexOf('=');
|
||||
if (eqIndex <= 0) continue;
|
||||
|
||||
const key = trimmed.substring(0, eqIndex).trim();
|
||||
if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) continue;
|
||||
|
||||
let value = trimmed.substring(eqIndex + 1);
|
||||
if (value.startsWith('"')) {
|
||||
let end = 1;
|
||||
while (end < value.length) {
|
||||
if (value[end] === '"' && value[end - 1] !== '\\') break;
|
||||
end++;
|
||||
}
|
||||
value = value.substring(1, end).replace(/\\"/g, '"');
|
||||
} else if (value.startsWith("'")) {
|
||||
const end = value.indexOf("'", 1);
|
||||
value = end > 0 ? value.substring(1, end) : value.substring(1);
|
||||
} else {
|
||||
const commentIndex = value.indexOf(' #');
|
||||
if (commentIndex > 0) value = value.substring(0, commentIndex);
|
||||
value = value.trim();
|
||||
}
|
||||
existingEnv[key] = value;
|
||||
}
|
||||
} catch {
|
||||
// File doesn't exist, start fresh
|
||||
}
|
||||
|
||||
// Merge: update known keys from payload, preserve unknown keys
|
||||
const knownKeys = new Set([
|
||||
'RERANKER_API_KEY', 'RERANKER_API_BASE', 'RERANKER_MODEL',
|
||||
'EMBEDDING_API_KEY', 'EMBEDDING_API_BASE', 'EMBEDDING_MODEL',
|
||||
'LITELLM_API_KEY', 'LITELLM_API_BASE', 'LITELLM_MODEL'
|
||||
]);
|
||||
|
||||
// Apply updates from payload
|
||||
for (const [key, value] of Object.entries(env)) {
|
||||
if (value) {
|
||||
existingEnv[key] = value;
|
||||
} else if (knownKeys.has(key)) {
|
||||
// Remove known key if value is empty
|
||||
delete existingEnv[key];
|
||||
}
|
||||
}
|
||||
|
||||
// Build env file content
|
||||
const lines = [
|
||||
'# CodexLens Environment Configuration',
|
||||
'# Managed by CCW Dashboard',
|
||||
''
|
||||
];
|
||||
|
||||
// Add preserved custom comments
|
||||
if (existingComments.length > 0) {
|
||||
lines.push(...existingComments, '');
|
||||
}
|
||||
|
||||
// Group by prefix
|
||||
const groups: Record<string, string[]> = {
|
||||
'RERANKER': [],
|
||||
'EMBEDDING': [],
|
||||
'LITELLM': [],
|
||||
'CODEXLENS': [],
|
||||
'OTHER': []
|
||||
};
|
||||
|
||||
for (const [key, value] of Object.entries(existingEnv)) {
|
||||
if (!value) continue;
|
||||
// SECURITY: Escape special characters to prevent .env injection
|
||||
const escapedValue = value
|
||||
.replace(/\\/g, '\\\\') // Escape backslashes first
|
||||
.replace(/"/g, '\\"') // Escape double quotes
|
||||
.replace(/\n/g, '\\n') // Escape newlines
|
||||
.replace(/\r/g, '\\r'); // Escape carriage returns
|
||||
const line = `${key}="${escapedValue}"`;
|
||||
if (key.startsWith('RERANKER_')) groups['RERANKER'].push(line);
|
||||
else if (key.startsWith('EMBEDDING_')) groups['EMBEDDING'].push(line);
|
||||
else if (key.startsWith('LITELLM_')) groups['LITELLM'].push(line);
|
||||
else if (key.startsWith('CODEXLENS_')) groups['CODEXLENS'].push(line);
|
||||
else groups['OTHER'].push(line);
|
||||
}
|
||||
|
||||
// Add grouped content
|
||||
if (groups['RERANKER'].length) {
|
||||
lines.push('# Reranker API Configuration');
|
||||
lines.push(...groups['RERANKER'], '');
|
||||
}
|
||||
if (groups['EMBEDDING'].length) {
|
||||
lines.push('# Embedding API Configuration');
|
||||
lines.push(...groups['EMBEDDING'], '');
|
||||
}
|
||||
if (groups['LITELLM'].length) {
|
||||
lines.push('# LiteLLM Configuration');
|
||||
lines.push(...groups['LITELLM'], '');
|
||||
}
|
||||
if (groups['CODEXLENS'].length) {
|
||||
lines.push('# CodexLens Settings');
|
||||
lines.push(...groups['CODEXLENS'], '');
|
||||
}
|
||||
if (groups['OTHER'].length) {
|
||||
lines.push('# Other Settings');
|
||||
lines.push(...groups['OTHER'], '');
|
||||
}
|
||||
|
||||
await writeFile(envPath, lines.join('\n'), 'utf-8');
|
||||
|
||||
// Also update settings.json with mapped values
|
||||
const settingsPath = join(homedir(), '.codexlens', 'settings.json');
|
||||
let settings: Record<string, any> = {};
|
||||
try {
|
||||
const settingsContent = await readFile(settingsPath, 'utf-8');
|
||||
settings = JSON.parse(settingsContent);
|
||||
} catch {
|
||||
// File doesn't exist, create default structure
|
||||
settings = { embedding: {}, reranker: {}, api: {}, cascade: {}, llm: {} };
|
||||
}
|
||||
|
||||
// Map env vars to settings.json structure
|
||||
const envToSettings: Record<string, { path: string[], transform?: (v: string) => any }> = {
|
||||
'CODEXLENS_EMBEDDING_BACKEND': { path: ['embedding', 'backend'] },
|
||||
'CODEXLENS_EMBEDDING_MODEL': { path: ['embedding', 'model'] },
|
||||
'CODEXLENS_USE_GPU': { path: ['embedding', 'use_gpu'], transform: v => v === 'true' },
|
||||
'CODEXLENS_EMBEDDING_STRATEGY': { path: ['embedding', 'strategy'] },
|
||||
'CODEXLENS_EMBEDDING_COOLDOWN': { path: ['embedding', 'cooldown'], transform: v => parseFloat(v) },
|
||||
'CODEXLENS_RERANKER_BACKEND': { path: ['reranker', 'backend'] },
|
||||
'CODEXLENS_RERANKER_MODEL': { path: ['reranker', 'model'] },
|
||||
'CODEXLENS_RERANKER_ENABLED': { path: ['reranker', 'enabled'], transform: v => v === 'true' },
|
||||
'CODEXLENS_RERANKER_TOP_K': { path: ['reranker', 'top_k'], transform: v => parseInt(v, 10) },
|
||||
'CODEXLENS_API_MAX_WORKERS': { path: ['api', 'max_workers'], transform: v => parseInt(v, 10) },
|
||||
'CODEXLENS_API_BATCH_SIZE': { path: ['api', 'batch_size'], transform: v => parseInt(v, 10) },
|
||||
'CODEXLENS_CASCADE_STRATEGY': { path: ['cascade', 'strategy'] },
|
||||
'CODEXLENS_CASCADE_COARSE_K': { path: ['cascade', 'coarse_k'], transform: v => parseInt(v, 10) },
|
||||
'CODEXLENS_CASCADE_FINE_K': { path: ['cascade', 'fine_k'], transform: v => parseInt(v, 10) },
|
||||
'CODEXLENS_LLM_ENABLED': { path: ['llm', 'enabled'], transform: v => v === 'true' },
|
||||
'CODEXLENS_LLM_BATCH_SIZE': { path: ['llm', 'batch_size'], transform: v => parseInt(v, 10) },
|
||||
'LITELLM_EMBEDDING_MODEL': { path: ['embedding', 'model'] },
|
||||
'LITELLM_RERANKER_MODEL': { path: ['reranker', 'model'] }
|
||||
};
|
||||
|
||||
// Apply env vars to settings
|
||||
for (const [envKey, value] of Object.entries(env)) {
|
||||
const mapping = envToSettings[envKey];
|
||||
if (mapping && value) {
|
||||
const [section, key] = mapping.path;
|
||||
if (!settings[section]) settings[section] = {};
|
||||
settings[section][key] = mapping.transform ? mapping.transform(value) : value;
|
||||
}
|
||||
}
|
||||
|
||||
// Write updated settings
|
||||
await writeFile(settingsPath, JSON.stringify(settings, null, 2), 'utf-8');
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: 'Environment and settings configuration saved',
|
||||
path: envPath,
|
||||
settingsPath
|
||||
};
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
335
ccw/src/core/routes/codexlens/index-handlers.ts
Normal file
335
ccw/src/core/routes/codexlens/index-handlers.ts
Normal file
@@ -0,0 +1,335 @@
|
||||
/**
|
||||
* CodexLens index management handlers.
|
||||
*/
|
||||
|
||||
import {
|
||||
cancelIndexing,
|
||||
checkVenvStatus,
|
||||
ensureLiteLLMEmbedderReady,
|
||||
executeCodexLens,
|
||||
isIndexingInProgress,
|
||||
} from '../../../tools/codex-lens.js';
|
||||
import type { ProgressInfo } from '../../../tools/codex-lens.js';
|
||||
import type { RouteContext } from '../types.js';
|
||||
import { extractJSON, formatSize } from './utils.js';
|
||||
|
||||
/**
|
||||
* Handle CodexLens index routes
|
||||
* @returns true if route was handled, false otherwise
|
||||
*/
|
||||
export async function handleCodexLensIndexRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
const { pathname, url, req, res, initialPath, handlePostRequest, broadcastToClients } = ctx;
|
||||
|
||||
// API: CodexLens Index List - Get all indexed projects with details
|
||||
if (pathname === '/api/codexlens/indexes') {
|
||||
try {
|
||||
// Check if CodexLens is installed first (without auto-installing)
|
||||
const venvStatus = await checkVenvStatus();
|
||||
if (!venvStatus.ready) {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, indexes: [], totalSize: 0, totalSizeFormatted: '0 B' }));
|
||||
return true;
|
||||
}
|
||||
|
||||
// Execute all CLI commands in parallel
|
||||
const [configResult, projectsResult, statusResult] = await Promise.all([
|
||||
executeCodexLens(['config', '--json']),
|
||||
executeCodexLens(['projects', 'list', '--json']),
|
||||
executeCodexLens(['status', '--json'])
|
||||
]);
|
||||
|
||||
let indexDir = '';
|
||||
if (configResult.success) {
|
||||
try {
|
||||
const config = extractJSON(configResult.output ?? '');
|
||||
if (config.success && config.result) {
|
||||
// CLI returns index_dir (not index_root)
|
||||
indexDir = config.result.index_dir || config.result.index_root || '';
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
console.error('[CodexLens] Failed to parse config for index list:', e instanceof Error ? e.message : String(e));
|
||||
}
|
||||
}
|
||||
|
||||
let indexes: any[] = [];
|
||||
let totalSize = 0;
|
||||
let vectorIndexCount = 0;
|
||||
let normalIndexCount = 0;
|
||||
|
||||
if (projectsResult.success) {
|
||||
try {
|
||||
const projectsData = extractJSON(projectsResult.output ?? '');
|
||||
if (projectsData.success && Array.isArray(projectsData.result)) {
|
||||
const { stat, readdir } = await import('fs/promises');
|
||||
const { existsSync } = await import('fs');
|
||||
const { basename, join } = await import('path');
|
||||
|
||||
for (const project of projectsData.result) {
|
||||
// Skip test/temp projects
|
||||
if (project.source_root && (
|
||||
project.source_root.includes('\\Temp\\') ||
|
||||
project.source_root.includes('/tmp/') ||
|
||||
project.total_files === 0
|
||||
)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
let projectSize = 0;
|
||||
let hasVectorIndex = false;
|
||||
let hasNormalIndex = true; // All projects have FTS index
|
||||
let lastModified = null;
|
||||
|
||||
// Try to get actual index size from index_root
|
||||
if (project.index_root && existsSync(project.index_root)) {
|
||||
try {
|
||||
const files = await readdir(project.index_root);
|
||||
for (const file of files) {
|
||||
try {
|
||||
const filePath = join(project.index_root, file);
|
||||
const fileStat = await stat(filePath);
|
||||
projectSize += fileStat.size;
|
||||
if (!lastModified || fileStat.mtime > lastModified) {
|
||||
lastModified = fileStat.mtime;
|
||||
}
|
||||
// Check for vector/embedding files
|
||||
if (file.includes('vector') || file.includes('embedding') ||
|
||||
file.endsWith('.faiss') || file.endsWith('.npy') ||
|
||||
file.includes('semantic_chunks')) {
|
||||
hasVectorIndex = true;
|
||||
}
|
||||
} catch {
|
||||
// Skip files we can't stat
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Can't read index directory
|
||||
}
|
||||
}
|
||||
|
||||
if (hasVectorIndex) vectorIndexCount++;
|
||||
if (hasNormalIndex) normalIndexCount++;
|
||||
totalSize += projectSize;
|
||||
|
||||
// Use source_root as the display name
|
||||
const displayName = project.source_root ? basename(project.source_root) : `project_${project.id}`;
|
||||
|
||||
indexes.push({
|
||||
id: displayName,
|
||||
path: project.source_root || '',
|
||||
indexPath: project.index_root || '',
|
||||
size: projectSize,
|
||||
sizeFormatted: formatSize(projectSize),
|
||||
fileCount: project.total_files || 0,
|
||||
dirCount: project.total_dirs || 0,
|
||||
hasVectorIndex,
|
||||
hasNormalIndex,
|
||||
status: project.status || 'active',
|
||||
lastModified: lastModified ? lastModified.toISOString() : null
|
||||
});
|
||||
}
|
||||
|
||||
// Sort by file count (most files first), then by name
|
||||
indexes.sort((a, b) => {
|
||||
if (b.fileCount !== a.fileCount) return b.fileCount - a.fileCount;
|
||||
return a.id.localeCompare(b.id);
|
||||
});
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
console.error('[CodexLens] Failed to parse projects list:', e instanceof Error ? e.message : String(e));
|
||||
}
|
||||
}
|
||||
|
||||
// Parse summary stats from status command (already fetched in parallel)
|
||||
let statusSummary: any = {};
|
||||
|
||||
if (statusResult.success) {
|
||||
try {
|
||||
const status = extractJSON(statusResult.output ?? '');
|
||||
if (status.success && status.result) {
|
||||
statusSummary = {
|
||||
totalProjects: status.result.projects_count || indexes.length,
|
||||
totalFiles: status.result.total_files || 0,
|
||||
totalDirs: status.result.total_dirs || 0,
|
||||
// Keep calculated totalSize for consistency with per-project sizes
|
||||
// status.index_size_bytes includes shared resources (models, cache)
|
||||
indexSizeBytes: totalSize,
|
||||
indexSizeMb: totalSize / (1024 * 1024),
|
||||
embeddings: status.result.embeddings || {},
|
||||
// Store full index dir size separately for reference
|
||||
fullIndexDirSize: status.result.index_size_bytes || 0,
|
||||
fullIndexDirSizeFormatted: formatSize(status.result.index_size_bytes || 0)
|
||||
};
|
||||
}
|
||||
} catch (e: unknown) {
|
||||
console.error('[CodexLens] Failed to parse status:', e instanceof Error ? e.message : String(e));
|
||||
}
|
||||
}
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
success: true,
|
||||
indexDir,
|
||||
indexes,
|
||||
summary: {
|
||||
totalProjects: indexes.length,
|
||||
totalSize,
|
||||
totalSizeFormatted: formatSize(totalSize),
|
||||
vectorIndexCount,
|
||||
normalIndexCount,
|
||||
...statusSummary
|
||||
}
|
||||
}));
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Clean (Clean indexes)
|
||||
if (pathname === '/api/codexlens/clean' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { all = false, path } = body as { all?: unknown; path?: unknown };
|
||||
|
||||
try {
|
||||
const args = ['clean'];
|
||||
if (all === true) {
|
||||
args.push('--all');
|
||||
} else if (typeof path === 'string' && path.trim().length > 0) {
|
||||
// Path is passed as a positional argument, not as a flag
|
||||
args.push(path);
|
||||
}
|
||||
args.push('--json');
|
||||
|
||||
const result = await executeCodexLens(args);
|
||||
if (result.success) {
|
||||
return { success: true, message: 'Indexes cleaned successfully' };
|
||||
} else {
|
||||
return { success: false, error: result.error || 'Failed to clean indexes', status: 500 };
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Init (Initialize workspace index)
|
||||
if (pathname === '/api/codexlens/init' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { path: projectPath, indexType = 'vector', embeddingModel = 'code', embeddingBackend = 'fastembed', maxWorkers = 1 } = body as {
|
||||
path?: unknown;
|
||||
indexType?: unknown;
|
||||
embeddingModel?: unknown;
|
||||
embeddingBackend?: unknown;
|
||||
maxWorkers?: unknown;
|
||||
};
|
||||
const targetPath = typeof projectPath === 'string' && projectPath.trim().length > 0 ? projectPath : initialPath;
|
||||
const resolvedIndexType = indexType === 'normal' ? 'normal' : 'vector';
|
||||
const resolvedEmbeddingModel = typeof embeddingModel === 'string' && embeddingModel.trim().length > 0 ? embeddingModel : 'code';
|
||||
const resolvedEmbeddingBackend = typeof embeddingBackend === 'string' && embeddingBackend.trim().length > 0 ? embeddingBackend : 'fastembed';
|
||||
const resolvedMaxWorkers = typeof maxWorkers === 'number' ? maxWorkers : Number(maxWorkers);
|
||||
|
||||
// Ensure LiteLLM backend dependencies are installed before running the CLI
|
||||
if (resolvedIndexType !== 'normal' && resolvedEmbeddingBackend === 'litellm') {
|
||||
const installResult = await ensureLiteLLMEmbedderReady();
|
||||
if (!installResult.success) {
|
||||
return { success: false, error: installResult.error || 'Failed to prepare LiteLLM embedder', status: 500 };
|
||||
}
|
||||
}
|
||||
|
||||
// Build CLI arguments based on index type
|
||||
// Use 'index init' subcommand (new CLI structure)
|
||||
const args = ['index', 'init', targetPath, '--json'];
|
||||
if (resolvedIndexType === 'normal') {
|
||||
args.push('--no-embeddings');
|
||||
} else {
|
||||
// Add embedding model selection for vector index (use --model, not --embedding-model)
|
||||
args.push('--model', resolvedEmbeddingModel);
|
||||
// Add embedding backend if not using default fastembed (use --backend, not --embedding-backend)
|
||||
if (resolvedEmbeddingBackend && resolvedEmbeddingBackend !== 'fastembed') {
|
||||
args.push('--backend', resolvedEmbeddingBackend);
|
||||
}
|
||||
// Add max workers for concurrent API calls (useful for litellm backend)
|
||||
if (!Number.isNaN(resolvedMaxWorkers) && resolvedMaxWorkers > 1) {
|
||||
args.push('--max-workers', String(resolvedMaxWorkers));
|
||||
}
|
||||
}
|
||||
|
||||
// Broadcast start event
|
||||
broadcastToClients({
|
||||
type: 'CODEXLENS_INDEX_PROGRESS',
|
||||
payload: { stage: 'start', message: 'Starting index...', percent: 0, path: targetPath, indexType: resolvedIndexType }
|
||||
});
|
||||
|
||||
try {
|
||||
const result = await executeCodexLens(args, {
|
||||
cwd: targetPath,
|
||||
timeout: 1800000, // 30 minutes for large codebases
|
||||
onProgress: (progress: ProgressInfo) => {
|
||||
broadcastToClients({
|
||||
type: 'CODEXLENS_INDEX_PROGRESS',
|
||||
payload: { ...progress, path: targetPath }
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
if (result.success) {
|
||||
broadcastToClients({
|
||||
type: 'CODEXLENS_INDEX_PROGRESS',
|
||||
payload: { stage: 'complete', message: 'Index complete', percent: 100, path: targetPath }
|
||||
});
|
||||
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
return { success: true, result: parsed };
|
||||
} catch {
|
||||
return { success: true, output: result.output ?? '' };
|
||||
}
|
||||
} else {
|
||||
broadcastToClients({
|
||||
type: 'CODEXLENS_INDEX_PROGRESS',
|
||||
payload: { stage: 'error', message: result.error || 'Unknown error', percent: 0, path: targetPath }
|
||||
});
|
||||
return { success: false, error: result.error, status: 500 };
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
broadcastToClients({
|
||||
type: 'CODEXLENS_INDEX_PROGRESS',
|
||||
payload: { stage: 'error', message, percent: 0, path: targetPath }
|
||||
});
|
||||
return { success: false, error: message, status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: Cancel CodexLens Indexing
|
||||
if (pathname === '/api/codexlens/cancel' && req.method === 'POST') {
|
||||
const result = cancelIndexing();
|
||||
|
||||
// Broadcast cancellation event
|
||||
if (result.success) {
|
||||
broadcastToClients({
|
||||
type: 'CODEXLENS_INDEX_PROGRESS',
|
||||
payload: { stage: 'cancelled', message: 'Indexing cancelled by user', percent: 0 }
|
||||
});
|
||||
}
|
||||
|
||||
res.writeHead(result.success ? 200 : 400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: Check if indexing is in progress
|
||||
if (pathname === '/api/codexlens/indexing-status') {
|
||||
const inProgress = isIndexingInProgress();
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, inProgress }));
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
767
ccw/src/core/routes/codexlens/semantic-handlers.ts
Normal file
767
ccw/src/core/routes/codexlens/semantic-handlers.ts
Normal file
@@ -0,0 +1,767 @@
|
||||
/**
|
||||
* CodexLens semantic search + reranker + SPLADE handlers.
|
||||
*/
|
||||
|
||||
import {
|
||||
checkSemanticStatus,
|
||||
checkVenvStatus,
|
||||
executeCodexLens,
|
||||
installSemantic,
|
||||
} from '../../../tools/codex-lens.js';
|
||||
import type { GpuMode } from '../../../tools/codex-lens.js';
|
||||
import { loadLiteLLMApiConfig } from '../../../config/litellm-api-config-manager.js';
|
||||
import type { RouteContext } from '../types.js';
|
||||
import { extractJSON } from './utils.js';
|
||||
|
||||
export async function handleCodexLensSemanticRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
const { pathname, url, req, res, initialPath, handlePostRequest } = ctx;
|
||||
|
||||
// API: CodexLens Semantic Search Status
|
||||
if (pathname === '/api/codexlens/semantic/status') {
|
||||
const status = await checkSemanticStatus();
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(status));
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Semantic Metadata List
|
||||
if (pathname === '/api/codexlens/semantic/metadata') {
|
||||
const offset = parseInt(url.searchParams.get('offset') || '0', 10);
|
||||
const limit = parseInt(url.searchParams.get('limit') || '50', 10);
|
||||
const tool = url.searchParams.get('tool') || '';
|
||||
const projectPath = url.searchParams.get('path') || initialPath;
|
||||
|
||||
try {
|
||||
const args = [
|
||||
'semantic-list',
|
||||
'--path', projectPath,
|
||||
'--offset', offset.toString(),
|
||||
'--limit', limit.toString(),
|
||||
'--json'
|
||||
];
|
||||
if (tool) {
|
||||
args.push('--tool', tool);
|
||||
}
|
||||
|
||||
const result = await executeCodexLens(args, { cwd: projectPath });
|
||||
|
||||
if (result.success) {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(result.output ?? '');
|
||||
} else {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: result.error }));
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens LLM Enhancement (run enhance command)
|
||||
if (pathname === '/api/codexlens/enhance' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { path: projectPath, tool = 'gemini', batchSize = 5, timeoutMs = 300000 } = body as {
|
||||
path?: unknown;
|
||||
tool?: unknown;
|
||||
batchSize?: unknown;
|
||||
timeoutMs?: unknown;
|
||||
};
|
||||
const targetPath = typeof projectPath === 'string' && projectPath.trim().length > 0 ? projectPath : initialPath;
|
||||
const resolvedTool = typeof tool === 'string' && tool.trim().length > 0 ? tool : 'gemini';
|
||||
const resolvedBatchSize = typeof batchSize === 'number' ? batchSize : Number(batchSize);
|
||||
const resolvedTimeoutMs = typeof timeoutMs === 'number' ? timeoutMs : Number(timeoutMs);
|
||||
|
||||
try {
|
||||
const args = ['enhance', targetPath, '--tool', resolvedTool, '--batch-size', String(resolvedBatchSize)];
|
||||
const timeout = !Number.isNaN(resolvedTimeoutMs) ? resolvedTimeoutMs + 30000 : 330000;
|
||||
const result = await executeCodexLens(args, { cwd: targetPath, timeout });
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
return { success: true, result: parsed };
|
||||
} catch {
|
||||
return { success: true, output: result.output ?? '' };
|
||||
}
|
||||
} else {
|
||||
return { success: false, error: result.error, status: 500 };
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Search (FTS5 text search with mode support)
|
||||
if (pathname === '/api/codexlens/search') {
|
||||
const query = url.searchParams.get('query') || '';
|
||||
const limit = parseInt(url.searchParams.get('limit') || '20', 10);
|
||||
const mode = url.searchParams.get('mode') || 'exact'; // exact, fuzzy, hybrid, vector
|
||||
const maxContentLength = parseInt(url.searchParams.get('max_content_length') || '200', 10);
|
||||
const extraFilesCount = parseInt(url.searchParams.get('extra_files_count') || '10', 10);
|
||||
const projectPath = url.searchParams.get('path') || initialPath;
|
||||
|
||||
if (!query) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: 'Query parameter is required' }));
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
// Request more results to support split (full content + extra files)
|
||||
const totalToFetch = limit + extraFilesCount;
|
||||
// Use --method instead of deprecated --mode
|
||||
const args = ['search', query, '--path', projectPath, '--limit', totalToFetch.toString(), '--method', mode, '--json'];
|
||||
|
||||
const result = await executeCodexLens(args, { cwd: projectPath });
|
||||
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
const allResults = parsed.result?.results || [];
|
||||
|
||||
// Truncate content and split results
|
||||
const truncateContent = (content: string | null | undefined): string => {
|
||||
if (!content) return '';
|
||||
if (content.length <= maxContentLength) return content;
|
||||
return content.slice(0, maxContentLength) + '...';
|
||||
};
|
||||
|
||||
// Split results: first N with full content, rest as file paths only
|
||||
const resultsWithContent = allResults.slice(0, limit).map((r: any) => ({
|
||||
...r,
|
||||
content: truncateContent(r.content || r.excerpt),
|
||||
excerpt: truncateContent(r.excerpt || r.content),
|
||||
}));
|
||||
|
||||
const extraResults = allResults.slice(limit, limit + extraFilesCount);
|
||||
const extraFiles = [...new Set(extraResults.map((r: any) => r.path || r.file))];
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
success: true,
|
||||
results: resultsWithContent,
|
||||
extra_files: extraFiles.length > 0 ? extraFiles : undefined,
|
||||
metadata: {
|
||||
total: allResults.length,
|
||||
limit,
|
||||
max_content_length: maxContentLength,
|
||||
extra_files_count: extraFilesCount,
|
||||
},
|
||||
}));
|
||||
} catch {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, results: [], output: result.output }));
|
||||
}
|
||||
} else {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: result.error }));
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Search Files Only (return file paths only, with mode support)
|
||||
if (pathname === '/api/codexlens/search_files') {
|
||||
const query = url.searchParams.get('query') || '';
|
||||
const limit = parseInt(url.searchParams.get('limit') || '20', 10);
|
||||
const mode = url.searchParams.get('mode') || 'exact'; // exact, fuzzy, hybrid, vector
|
||||
const projectPath = url.searchParams.get('path') || initialPath;
|
||||
|
||||
if (!query) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: 'Query parameter is required' }));
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
// Use --method instead of deprecated --mode
|
||||
const args = ['search', query, '--path', projectPath, '--limit', limit.toString(), '--method', mode, '--files-only', '--json'];
|
||||
|
||||
const result = await executeCodexLens(args, { cwd: projectPath });
|
||||
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, ...parsed.result }));
|
||||
} catch {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, files: [], output: result.output }));
|
||||
}
|
||||
} else {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: result.error }));
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Symbol Search (search for symbols by name)
|
||||
if (pathname === '/api/codexlens/symbol') {
|
||||
const query = url.searchParams.get('query') || '';
|
||||
const file = url.searchParams.get('file');
|
||||
const limit = parseInt(url.searchParams.get('limit') || '20', 10);
|
||||
const projectPath = url.searchParams.get('path') || initialPath;
|
||||
|
||||
if (!query && !file) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: 'Either query or file parameter is required' }));
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
let args;
|
||||
if (file) {
|
||||
// Get symbols from a specific file
|
||||
args = ['symbol', '--file', file, '--json'];
|
||||
} else {
|
||||
// Search for symbols by name
|
||||
args = ['symbol', query, '--path', projectPath, '--limit', limit.toString(), '--json'];
|
||||
}
|
||||
|
||||
const result = await executeCodexLens(args, { cwd: projectPath });
|
||||
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, ...parsed.result }));
|
||||
} catch {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, symbols: [], output: result.output }));
|
||||
}
|
||||
} else {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: result.error }));
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: CodexLens Semantic Search Install (with GPU mode support)
|
||||
if (pathname === '/api/codexlens/semantic/install' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
try {
|
||||
// Get GPU mode from request body, default to 'cpu'
|
||||
const { gpuMode } = body as { gpuMode?: unknown };
|
||||
const resolvedGpuModeCandidate = typeof gpuMode === 'string' && gpuMode.trim().length > 0 ? gpuMode : 'cpu';
|
||||
const validModes: GpuMode[] = ['cpu', 'cuda', 'directml'];
|
||||
|
||||
if (!validModes.includes(resolvedGpuModeCandidate as GpuMode)) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Invalid GPU mode: ${resolvedGpuModeCandidate}. Valid modes: ${validModes.join(', ')}`,
|
||||
status: 400
|
||||
};
|
||||
}
|
||||
|
||||
const resolvedGpuMode = resolvedGpuModeCandidate as GpuMode;
|
||||
const result = await installSemantic(resolvedGpuMode);
|
||||
if (result.success) {
|
||||
const status = await checkSemanticStatus();
|
||||
const modeDescriptions = {
|
||||
cpu: 'CPU (ONNX Runtime)',
|
||||
cuda: 'NVIDIA CUDA GPU',
|
||||
directml: 'Windows DirectML GPU'
|
||||
};
|
||||
return {
|
||||
success: true,
|
||||
message: `Semantic search installed successfully with ${modeDescriptions[resolvedGpuMode]}`,
|
||||
gpuMode: resolvedGpuMode,
|
||||
...status
|
||||
};
|
||||
} else {
|
||||
return { success: false, error: result.error, status: 500 };
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// RERANKER CONFIGURATION ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
// API: Get Reranker Configuration
|
||||
if (pathname === '/api/codexlens/reranker/config' && req.method === 'GET') {
|
||||
try {
|
||||
const venvStatus = await checkVenvStatus();
|
||||
|
||||
// Default reranker config (matches fastembed default)
|
||||
const rerankerConfig = {
|
||||
backend: 'fastembed',
|
||||
model_name: 'Xenova/ms-marco-MiniLM-L-6-v2',
|
||||
api_provider: 'siliconflow',
|
||||
api_key_set: false,
|
||||
available_backends: ['onnx', 'api', 'litellm', 'legacy'],
|
||||
api_providers: ['siliconflow', 'cohere', 'jina'],
|
||||
litellm_endpoints: [] as string[],
|
||||
config_source: 'default'
|
||||
};
|
||||
|
||||
// Load LiteLLM endpoints for dropdown
|
||||
try {
|
||||
const litellmConfig = loadLiteLLMApiConfig(initialPath);
|
||||
if (litellmConfig.endpoints && Array.isArray(litellmConfig.endpoints)) {
|
||||
rerankerConfig.litellm_endpoints = litellmConfig.endpoints.map(
|
||||
(ep: any) => ep.alias || ep.name || ep.baseUrl
|
||||
).filter(Boolean);
|
||||
}
|
||||
} catch {
|
||||
// LiteLLM config not available, continue with empty endpoints
|
||||
}
|
||||
|
||||
// If CodexLens is installed, try to get actual config
|
||||
if (venvStatus.ready) {
|
||||
try {
|
||||
const result = await executeCodexLens(['config', '--json']);
|
||||
if (result.success) {
|
||||
const config = extractJSON(result.output ?? '');
|
||||
if (config.success && config.result) {
|
||||
// Map config values
|
||||
if (config.result.reranker_backend) {
|
||||
rerankerConfig.backend = config.result.reranker_backend;
|
||||
rerankerConfig.config_source = 'codexlens';
|
||||
}
|
||||
if (config.result.reranker_model) {
|
||||
rerankerConfig.model_name = config.result.reranker_model;
|
||||
}
|
||||
if (config.result.reranker_api_provider) {
|
||||
rerankerConfig.api_provider = config.result.reranker_api_provider;
|
||||
}
|
||||
// Check if API key is set (from env)
|
||||
if (process.env.RERANKER_API_KEY) {
|
||||
rerankerConfig.api_key_set = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('[CodexLens] Failed to get reranker config:', e);
|
||||
}
|
||||
}
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, ...rerankerConfig }));
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: Set Reranker Configuration
|
||||
if (pathname === '/api/codexlens/reranker/config' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { backend, model_name, api_provider, api_key, litellm_endpoint } = body as {
|
||||
backend?: unknown;
|
||||
model_name?: unknown;
|
||||
api_provider?: unknown;
|
||||
api_key?: unknown;
|
||||
litellm_endpoint?: unknown;
|
||||
};
|
||||
const resolvedBackend = typeof backend === 'string' && backend.trim().length > 0 ? backend : undefined;
|
||||
const resolvedModelName = typeof model_name === 'string' && model_name.trim().length > 0 ? model_name : undefined;
|
||||
const resolvedApiProvider = typeof api_provider === 'string' && api_provider.trim().length > 0 ? api_provider : undefined;
|
||||
const resolvedApiKey = typeof api_key === 'string' && api_key.trim().length > 0 ? api_key : undefined;
|
||||
const resolvedLiteLLMEndpoint =
|
||||
typeof litellm_endpoint === 'string' && litellm_endpoint.trim().length > 0 ? litellm_endpoint : undefined;
|
||||
|
||||
// Validate backend
|
||||
const validBackends = ['onnx', 'api', 'litellm', 'legacy', 'fastembed'];
|
||||
if (resolvedBackend && !validBackends.includes(resolvedBackend)) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Invalid backend: ${resolvedBackend}. Valid options: ${validBackends.join(', ')}`,
|
||||
status: 400
|
||||
};
|
||||
}
|
||||
|
||||
// Validate api_provider
|
||||
const validProviders = ['siliconflow', 'cohere', 'jina'];
|
||||
if (resolvedApiProvider && !validProviders.includes(resolvedApiProvider)) {
|
||||
return {
|
||||
success: false,
|
||||
error: `Invalid api_provider: ${resolvedApiProvider}. Valid options: ${validProviders.join(', ')}`,
|
||||
status: 400
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const updates: string[] = [];
|
||||
|
||||
// Set backend
|
||||
if (resolvedBackend) {
|
||||
const result = await executeCodexLens(['config', 'set', 'reranker_backend', resolvedBackend, '--json']);
|
||||
if (result.success) updates.push('backend');
|
||||
}
|
||||
|
||||
// Set model
|
||||
if (resolvedModelName) {
|
||||
const result = await executeCodexLens(['config', 'set', 'reranker_model', resolvedModelName, '--json']);
|
||||
if (result.success) updates.push('model_name');
|
||||
}
|
||||
|
||||
// Set API provider
|
||||
if (resolvedApiProvider) {
|
||||
const result = await executeCodexLens(['config', 'set', 'reranker_api_provider', resolvedApiProvider, '--json']);
|
||||
if (result.success) updates.push('api_provider');
|
||||
}
|
||||
|
||||
// Set LiteLLM endpoint
|
||||
if (resolvedLiteLLMEndpoint) {
|
||||
const result = await executeCodexLens([
|
||||
'config',
|
||||
'set',
|
||||
'reranker_litellm_endpoint',
|
||||
resolvedLiteLLMEndpoint,
|
||||
'--json'
|
||||
]);
|
||||
if (result.success) updates.push('litellm_endpoint');
|
||||
}
|
||||
|
||||
// Handle API key - write to .env file or environment
|
||||
if (resolvedApiKey) {
|
||||
// For security, we store in process.env for the current session
|
||||
// In production, this should be written to a secure .env file
|
||||
process.env.RERANKER_API_KEY = resolvedApiKey;
|
||||
updates.push('api_key');
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Updated: ${updates.join(', ')}`,
|
||||
updated_fields: updates
|
||||
};
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// RERANKER MODEL MANAGEMENT ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
// API: List Reranker Models (list available reranker models)
|
||||
if (pathname === '/api/codexlens/reranker/models' && req.method === 'GET') {
|
||||
try {
|
||||
// Check if CodexLens is installed first
|
||||
const venvStatus = await checkVenvStatus();
|
||||
if (!venvStatus.ready) {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: 'CodexLens not installed' }));
|
||||
return true;
|
||||
}
|
||||
const result = await executeCodexLens(['reranker-model-list', '--json']);
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(parsed));
|
||||
} catch {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, result: { models: [] }, output: result.output }));
|
||||
}
|
||||
} else {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: result.error }));
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: Download Reranker Model (download reranker model by profile)
|
||||
if (pathname === '/api/codexlens/reranker/models/download' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { profile } = body as { profile?: unknown };
|
||||
const resolvedProfile = typeof profile === 'string' && profile.trim().length > 0 ? profile.trim() : undefined;
|
||||
|
||||
if (!resolvedProfile) {
|
||||
return { success: false, error: 'profile is required', status: 400 };
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await executeCodexLens(['reranker-model-download', resolvedProfile, '--json'], { timeout: 600000 }); // 10 min for download
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
return { success: true, ...parsed };
|
||||
} catch {
|
||||
return { success: true, output: result.output };
|
||||
}
|
||||
} else {
|
||||
return { success: false, error: result.error, status: 500 };
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: Delete Reranker Model (delete reranker model by profile)
|
||||
if (pathname === '/api/codexlens/reranker/models/delete' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { profile } = body as { profile?: unknown };
|
||||
const resolvedProfile = typeof profile === 'string' && profile.trim().length > 0 ? profile.trim() : undefined;
|
||||
|
||||
if (!resolvedProfile) {
|
||||
return { success: false, error: 'profile is required', status: 400 };
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await executeCodexLens(['reranker-model-delete', resolvedProfile, '--json']);
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
return { success: true, ...parsed };
|
||||
} catch {
|
||||
return { success: true, output: result.output };
|
||||
}
|
||||
} else {
|
||||
return { success: false, error: result.error, status: 500 };
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: Reranker Model Info (get reranker model info by profile)
|
||||
if (pathname === '/api/codexlens/reranker/models/info' && req.method === 'GET') {
|
||||
const profile = url.searchParams.get('profile');
|
||||
|
||||
if (!profile) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: 'profile parameter is required' }));
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await executeCodexLens(['reranker-model-info', profile, '--json']);
|
||||
if (result.success) {
|
||||
try {
|
||||
const parsed = extractJSON(result.output ?? '');
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(parsed));
|
||||
} catch {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: 'Failed to parse response' }));
|
||||
}
|
||||
} else {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: result.error }));
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// ============================================================
|
||||
// SPLADE ENDPOINTS
|
||||
// ============================================================
|
||||
|
||||
// API: SPLADE Status - Check if SPLADE is available and installed
|
||||
if (pathname === '/api/codexlens/splade/status') {
|
||||
try {
|
||||
// Check if CodexLens is installed first
|
||||
const venvStatus = await checkVenvStatus();
|
||||
if (!venvStatus.ready) {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
available: false,
|
||||
installed: false,
|
||||
model: 'naver/splade-cocondenser-ensembledistil',
|
||||
error: 'CodexLens not installed'
|
||||
}));
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check SPLADE availability using Python check
|
||||
const result = await executeCodexLens(['python', '-c',
|
||||
'from codexlens.semantic.splade_encoder import check_splade_available; ok, err = check_splade_available(); print(\"OK\" if ok else err)'
|
||||
]);
|
||||
|
||||
const output = result.output ?? '';
|
||||
const available = output.includes('OK');
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
available,
|
||||
installed: available,
|
||||
model: 'naver/splade-cocondenser-ensembledistil',
|
||||
error: available ? null : output.trim()
|
||||
}));
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
available: false,
|
||||
installed: false,
|
||||
model: 'naver/splade-cocondenser-ensembledistil',
|
||||
error: err instanceof Error ? err.message : String(err)
|
||||
}));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: SPLADE Install - Install SPLADE dependencies
|
||||
if (pathname === '/api/codexlens/splade/install' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
try {
|
||||
const { gpu } = body as { gpu?: unknown };
|
||||
const useGpu = typeof gpu === 'boolean' ? gpu : false;
|
||||
const packageName = useGpu ? 'codex-lens[splade-gpu]' : 'codex-lens[splade]';
|
||||
|
||||
// Use pip to install the SPLADE extras
|
||||
const { promisify } = await import('util');
|
||||
const execFilePromise = promisify(require('child_process').execFile);
|
||||
|
||||
const result = await execFilePromise('pip', ['install', packageName], {
|
||||
timeout: 600000 // 10 minutes
|
||||
});
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `SPLADE installed successfully (${useGpu ? 'GPU' : 'CPU'} mode)`,
|
||||
output: result.stdout
|
||||
};
|
||||
} catch (err: unknown) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const stderr = (err as { stderr?: unknown })?.stderr;
|
||||
return {
|
||||
success: false,
|
||||
error: message,
|
||||
stderr: typeof stderr === 'string' ? stderr : undefined,
|
||||
status: 500
|
||||
};
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: SPLADE Index Status - Check if SPLADE index exists for a project
|
||||
if (pathname === '/api/codexlens/splade/index-status') {
|
||||
try {
|
||||
const projectPath = url.searchParams.get('path');
|
||||
if (!projectPath) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: 'Missing path parameter' }));
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if CodexLens is installed first
|
||||
const venvStatus = await checkVenvStatus();
|
||||
if (!venvStatus.ready) {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ exists: false, error: 'CodexLens not installed' }));
|
||||
return true;
|
||||
}
|
||||
|
||||
const { join } = await import('path');
|
||||
const indexDb = join(projectPath, '.codexlens', '_index.db');
|
||||
|
||||
// Use Python to check SPLADE index status
|
||||
const pythonCode = `
|
||||
from codexlens.storage.splade_index import SpladeIndex
|
||||
from pathlib import Path
|
||||
try:
|
||||
idx = SpladeIndex(Path(\"${indexDb.replace(/\\\\/g, '\\\\\\\\')}\"))
|
||||
if idx.has_index():
|
||||
stats = idx.get_stats()
|
||||
meta = idx.get_metadata()
|
||||
model = meta.get('model_name', '') if meta else ''
|
||||
print(f\"OK|{stats['unique_chunks']}|{stats['total_postings']}|{model}\")
|
||||
else:
|
||||
print(\"NO_INDEX\")
|
||||
except Exception as e:
|
||||
print(f\"ERROR|{str(e)}\")
|
||||
`;
|
||||
|
||||
const result = await executeCodexLens(['python', '-c', pythonCode]);
|
||||
|
||||
const output = result.output ?? '';
|
||||
if (output.startsWith('OK|')) {
|
||||
const parts = output.trim().split('|');
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
exists: true,
|
||||
chunks: parseInt(parts[1]),
|
||||
postings: parseInt(parts[2]),
|
||||
model: parts[3]
|
||||
}));
|
||||
} else if (output.startsWith('ERROR|')) {
|
||||
const errorMsg = output.substring(6).trim();
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ exists: false, error: errorMsg }));
|
||||
} else {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ exists: false }));
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ exists: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: SPLADE Index Rebuild - Rebuild SPLADE index for a project
|
||||
if (pathname === '/api/codexlens/splade/rebuild' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { path: projectPath } = body as { path?: unknown };
|
||||
const resolvedProjectPath = typeof projectPath === 'string' && projectPath.trim().length > 0 ? projectPath : undefined;
|
||||
|
||||
if (!resolvedProjectPath) {
|
||||
return { success: false, error: 'Missing path parameter', status: 400 };
|
||||
}
|
||||
|
||||
try {
|
||||
// Use 'index splade' instead of deprecated 'splade-index'
|
||||
const result = await executeCodexLens(['index', 'splade', resolvedProjectPath, '--rebuild'], {
|
||||
cwd: resolvedProjectPath,
|
||||
timeout: 1800000 // 30 minutes for large codebases
|
||||
});
|
||||
|
||||
if (result.success) {
|
||||
return {
|
||||
success: true,
|
||||
message: 'SPLADE index rebuilt successfully',
|
||||
output: result.output
|
||||
};
|
||||
} else {
|
||||
return {
|
||||
success: false,
|
||||
error: result.error || 'Failed to rebuild SPLADE index',
|
||||
output: result.output,
|
||||
status: 500
|
||||
};
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
96
ccw/src/core/routes/codexlens/utils.ts
Normal file
96
ccw/src/core/routes/codexlens/utils.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
/**
|
||||
* CodexLens route utilities.
|
||||
*
|
||||
* CodexLens CLI can emit logging + ANSI escapes even with --json, so helpers
|
||||
* here normalize output for reliable JSON parsing.
|
||||
*/
|
||||
|
||||
/**
|
||||
* Strip ANSI color codes from string.
|
||||
* Rich library adds color codes even with --json flag.
|
||||
*/
|
||||
export function stripAnsiCodes(str: string): string {
|
||||
// ANSI escape code pattern: \x1b[...m or \x1b]...
|
||||
return str.replace(/\x1b\[[0-9;]*m/g, '')
|
||||
.replace(/\x1b\][0-9;]*\x07/g, '')
|
||||
.replace(/\x1b\][^\x07]*\x07/g, '');
|
||||
}
|
||||
|
||||
/**
|
||||
* Format file size to human readable string.
|
||||
*/
|
||||
export function formatSize(bytes: number): string {
|
||||
if (bytes === 0) return '0 B';
|
||||
const units = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
const k = 1024;
|
||||
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||
const size = parseFloat((bytes / Math.pow(k, i)).toFixed(i < 2 ? 0 : 1));
|
||||
return size + ' ' + units[i];
|
||||
}
|
||||
|
||||
/**
 * Extract the first JSON document from CLI output that may contain
 * surrounding logging noise.
 *
 * CodexLens CLI emits log lines (e.g. "INFO ...") before the JSON and may
 * append trailing messages (e.g. "INFO: Done") after it, and the Rich
 * library adds ANSI color codes even with --json. This helper strips the
 * ANSI codes, locates the first `{` or `[`, and scans forward with a small
 * brace/bracket-matching state machine (string- and escape-aware) to find
 * the matching closing delimiter, then parses only that span.
 *
 * @param output - raw CLI stdout (logs + JSON + logs)
 * @returns the parsed JSON value (shape depends on the CLI command; `any`)
 * @throws Error if no `{`/`[` is found; SyntaxError if the span is not
 *         valid JSON (including the fallback path below)
 */
export function extractJSON(output: string): any {
  // Strip ANSI color codes first so delimiter scanning sees clean text.
  const cleanOutput = stripAnsiCodes(output);

  // Find the first { or [ character (assumed start of the JSON payload).
  // NOTE(review): a literal '{' or '[' in a log line before the JSON would
  // be picked up here; the unbalanced-span fallback below then applies.
  const jsonStart = cleanOutput.search(/[{\[]/);
  if (jsonStart === -1) {
    throw new Error('No JSON found in output');
  }

  const startChar = cleanOutput[jsonStart];
  const endChar = startChar === '{' ? '}' : ']';

  // Find the matching closing brace/bracket using a depth counter.
  // Only the outermost delimiter kind is counted; delimiters inside JSON
  // strings are ignored via the inString/escapeNext tracking below.
  let depth = 0;
  let inString = false;
  let escapeNext = false;
  let jsonEnd = -1;       // exclusive end index of the JSON span, -1 = not found

  for (let i = jsonStart; i < cleanOutput.length; i++) {
    const char = cleanOutput[i];

    // Previous character was a backslash inside a string: this character
    // is escaped (e.g. \" or \\) and must not affect state.
    if (escapeNext) {
      escapeNext = false;
      continue;
    }

    if (char === '\\' && inString) {
      escapeNext = true;
      continue;
    }

    // Unescaped quote toggles string mode.
    if (char === '"') {
      inString = !inString;
      continue;
    }

    if (!inString) {
      if (char === startChar) {
        depth++;
      } else if (char === endChar) {
        depth--;
        if (depth === 0) {
          // Matched the opening delimiter: span is [jsonStart, i].
          jsonEnd = i + 1;
          break;
        }
      }
    }
  }

  if (jsonEnd === -1) {
    // Unbalanced span (e.g. truncated output): fall back to parsing from
    // the start delimiter to end-of-output (original behavior); lets
    // JSON.parse raise a descriptive SyntaxError.
    const jsonString = cleanOutput.substring(jsonStart);
    return JSON.parse(jsonString);
  }

  const jsonString = cleanOutput.substring(jsonStart, jsonEnd);
  return JSON.parse(jsonString);
}
|
||||
|
||||
265
ccw/src/core/routes/codexlens/watcher-handlers.ts
Normal file
265
ccw/src/core/routes/codexlens/watcher-handlers.ts
Normal file
@@ -0,0 +1,265 @@
|
||||
/**
|
||||
* CodexLens file watcher handlers.
|
||||
*
|
||||
* Maintains watcher process state across requests to support dashboard controls.
|
||||
*/
|
||||
|
||||
import {
|
||||
checkVenvStatus,
|
||||
executeCodexLens,
|
||||
getVenvPythonPath,
|
||||
} from '../../../tools/codex-lens.js';
|
||||
import type { RouteContext } from '../types.js';
|
||||
import { extractJSON, stripAnsiCodes } from './utils.js';
|
||||
|
||||
// File watcher state (persisted across requests)
|
||||
let watcherProcess: any = null;
|
||||
let watcherStats = {
|
||||
running: false,
|
||||
root_path: '',
|
||||
events_processed: 0,
|
||||
start_time: null as Date | null
|
||||
};
|
||||
|
||||
export async function stopWatcherForUninstall(): Promise<void> {
|
||||
if (!watcherStats.running || !watcherProcess) return;
|
||||
|
||||
try {
|
||||
watcherProcess.kill('SIGTERM');
|
||||
await new Promise(resolve => setTimeout(resolve, 500));
|
||||
if (watcherProcess && !watcherProcess.killed) {
|
||||
watcherProcess.kill('SIGKILL');
|
||||
}
|
||||
} catch {
|
||||
// Ignore errors stopping watcher
|
||||
}
|
||||
|
||||
watcherStats = {
|
||||
running: false,
|
||||
root_path: '',
|
||||
events_processed: 0,
|
||||
start_time: null
|
||||
};
|
||||
watcherProcess = null;
|
||||
}
|
||||
|
||||
/**
 * Handle CodexLens file-watcher routes:
 *   - GET  /api/codexlens/watch/status - report watcher state and uptime
 *   - POST /api/codexlens/watch/start  - spawn "python -m codexlens watch"
 *   - POST /api/codexlens/watch/stop   - SIGTERM (then SIGKILL) the watcher
 *
 * Reads and mutates the module-level watcherProcess/watcherStats state and
 * broadcasts CODEXLENS_WATCHER_STATUS messages to dashboard clients on
 * lifecycle changes.
 *
 * @param ctx - route context (pathname, req/res, POST helper, broadcaster)
 * @returns true if the route was handled, false otherwise
 */
export async function handleCodexLensWatcherRoutes(ctx: RouteContext): Promise<boolean> {
  const { pathname, req, res, initialPath, handlePostRequest, broadcastToClients } = ctx;

  // API: Get File Watcher Status (read-only snapshot of module state)
  if (pathname === '/api/codexlens/watch/status') {
    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({
      success: true,
      running: watcherStats.running,
      root_path: watcherStats.root_path,
      events_processed: watcherStats.events_processed,
      start_time: watcherStats.start_time?.toISOString() || null,
      uptime_seconds: watcherStats.start_time
        ? Math.floor((Date.now() - watcherStats.start_time.getTime()) / 1000)
        : 0
    }));
    return true;
  }

  // API: Start File Watcher
  if (pathname === '/api/codexlens/watch/start' && req.method === 'POST') {
    handlePostRequest(req, res, async (body) => {
      // Body fields are untrusted: validate types, fall back to the
      // dashboard's initial path and a 1000ms debounce.
      const { path: watchPath, debounce_ms = 1000 } = body as { path?: unknown; debounce_ms?: unknown };
      const targetPath = typeof watchPath === 'string' && watchPath.trim().length > 0 ? watchPath : initialPath;
      const resolvedDebounceMs = typeof debounce_ms === 'number' ? debounce_ms : Number(debounce_ms);
      const debounceMs = !Number.isNaN(resolvedDebounceMs) && resolvedDebounceMs > 0 ? resolvedDebounceMs : 1000;

      // Only one watcher process is supported at a time.
      if (watcherStats.running) {
        return { success: false, error: 'Watcher already running', status: 400 };
      }

      try {
        const { spawn } = await import('child_process');
        const { existsSync, statSync } = await import('fs');

        // Validate path exists and is a directory
        if (!existsSync(targetPath)) {
          return { success: false, error: `Path does not exist: ${targetPath}`, status: 400 };
        }
        const pathStat = statSync(targetPath);
        if (!pathStat.isDirectory()) {
          return { success: false, error: `Path is not a directory: ${targetPath}`, status: 400 };
        }

        // CodexLens must be installed (venv ready) before we can watch.
        const venvStatus = await checkVenvStatus();
        if (!venvStatus.ready) {
          return { success: false, error: 'CodexLens not installed', status: 400 };
        }

        // Verify directory is indexed before starting watcher.
        // Paths are lowercased and slash-normalized for a Windows-tolerant
        // comparison against the indexed project roots.
        try {
          const statusResult = await executeCodexLens(['projects', 'list', '--json']);
          if (statusResult.success && statusResult.output) {
            const parsed = extractJSON(statusResult.output);
            const projects = parsed.result || parsed || [];
            const normalizedTarget = targetPath.toLowerCase().replace(/\\/g, '/');
            const isIndexed = Array.isArray(projects) && projects.some((p: { source_root?: string }) =>
              p.source_root && p.source_root.toLowerCase().replace(/\\/g, '/') === normalizedTarget
            );
            if (!isIndexed) {
              return {
                success: false,
                error: `Directory is not indexed: ${targetPath}. Run 'codexlens init' first.`,
                status: 400
              };
            }
          }
        } catch (err) {
          console.warn('[CodexLens] Could not verify index status:', err);
          // Continue anyway - watcher will fail with proper error if not indexed
        }

        // Spawn watch process using Python (no shell: true for security).
        // CodexLens is a Python package, must run via python -m codexlens.
        const pythonPath = getVenvPythonPath();
        const args = ['-m', 'codexlens', 'watch', targetPath, '--debounce', String(debounceMs)];
        watcherProcess = spawn(pythonPath, args, {
          cwd: targetPath,
          stdio: ['ignore', 'pipe', 'pipe'],
          env: { ...process.env }
        });

        // Mark the watcher as running before wiring handlers; the 'error'/
        // 'exit' callbacks below reset this state on failure.
        watcherStats = {
          running: true,
          root_path: targetPath,
          events_processed: 0,
          start_time: new Date()
        };

        // Capture stderr for error messages (capped at 4KB to prevent memory leak)
        const MAX_STDERR_SIZE = 4096;
        let stderrBuffer = '';
        if (watcherProcess.stderr) {
          watcherProcess.stderr.on('data', (data: Buffer) => {
            stderrBuffer += data.toString();
            // Cap buffer size to prevent memory leak in long-running watchers;
            // keep the tail, which holds the most recent (most relevant) output.
            if (stderrBuffer.length > MAX_STDERR_SIZE) {
              stderrBuffer = stderrBuffer.slice(-MAX_STDERR_SIZE);
            }
          });
        }

        // Handle process output for event counting
        if (watcherProcess.stdout) {
          watcherProcess.stdout.on('data', (data: Buffer) => {
            const output = data.toString();
            // Count "Processed N events" progress lines emitted by the watcher.
            const matches = output.match(/Processed \d+ events?/g);
            if (matches) {
              watcherStats.events_processed += matches.length;
            }
          });
        }

        // Handle spawn errors (e.g. ENOENT if the venv python is missing):
        // reset state and notify dashboard clients.
        watcherProcess.on('error', (err: Error) => {
          console.error(`[CodexLens] Watcher spawn error: ${err.message}`);
          watcherStats.running = false;
          watcherProcess = null;
          broadcastToClients({
            type: 'CODEXLENS_WATCHER_STATUS',
            payload: { running: false, error: `Spawn error: ${err.message}` }
          });
        });

        // Handle process exit (both graceful stop and crash).
        watcherProcess.on('exit', (code: number) => {
          watcherStats.running = false;
          watcherProcess = null;
          console.log(`[CodexLens] Watcher exited with code ${code}`);

          // Broadcast error if exited with non-zero code, using the captured
          // stderr tail (ANSI-stripped) as the error message when available.
          if (code !== 0) {
            const errorMsg = stderrBuffer.trim() || `Exited with code ${code}`;
            const cleanError = stripAnsiCodes(errorMsg);
            broadcastToClients({
              type: 'CODEXLENS_WATCHER_STATUS',
              payload: { running: false, error: cleanError }
            });
          } else {
            broadcastToClients({
              type: 'CODEXLENS_WATCHER_STATUS',
              payload: { running: false }
            });
          }
        });

        // Broadcast watcher started
        broadcastToClients({
          type: 'CODEXLENS_WATCHER_STATUS',
          payload: { running: true, path: targetPath }
        });

        return {
          success: true,
          message: 'Watcher started',
          path: targetPath,
          pid: watcherProcess.pid
        };
      } catch (err: unknown) {
        return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
      }
    });
    return true;
  }

  // API: Stop File Watcher
  if (pathname === '/api/codexlens/watch/stop' && req.method === 'POST') {
    handlePostRequest(req, res, async () => {
      if (!watcherStats.running || !watcherProcess) {
        return { success: false, error: 'Watcher not running', status: 400 };
      }

      try {
        // Graceful shutdown first; escalate to SIGKILL if the process is
        // still alive after a short grace period. Re-check watcherProcess
        // since the 'exit' handler may have nulled it during the wait.
        watcherProcess.kill('SIGTERM');
        await new Promise(resolve => setTimeout(resolve, 500));
        if (watcherProcess && !watcherProcess.killed) {
          watcherProcess.kill('SIGKILL');
        }

        // Snapshot stats before resetting so they can be returned to the caller.
        const finalStats = {
          events_processed: watcherStats.events_processed,
          uptime_seconds: watcherStats.start_time
            ? Math.floor((Date.now() - watcherStats.start_time.getTime()) / 1000)
            : 0
        };

        watcherStats = {
          running: false,
          root_path: '',
          events_processed: 0,
          start_time: null
        };
        watcherProcess = null;

        broadcastToClients({
          type: 'CODEXLENS_WATCHER_STATUS',
          payload: { running: false }
        });

        return {
          success: true,
          message: 'Watcher stopped',
          ...finalStats
        };
      } catch (err: unknown) {
        return { success: false, error: err instanceof Error ? err.message : String(err), status: 500 };
      }
    });
    return true;
  }

  // Not a watcher route; let other handlers try.
  return false;
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* Discovery Routes Module
|
||||
*
|
||||
@@ -24,19 +23,9 @@
|
||||
* - PATCH /api/discoveries/:id/findings/:fid - Update finding status
|
||||
* - DELETE /api/discoveries/:id - Delete discovery session
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { readFileSync, existsSync, writeFileSync, mkdirSync, readdirSync, rmSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
}
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
// ========== Helper Functions ==========
|
||||
|
||||
|
||||
@@ -1,21 +1,11 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* Files Routes Module
|
||||
* Handles all file browsing related API endpoints
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { existsSync, readFileSync, readdirSync, statSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
}
|
||||
import { validatePath as validateAllowedPath } from '../../utils/path-validator.js';
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
// ========================================
|
||||
// Constants
|
||||
@@ -78,6 +68,39 @@ const EXT_TO_LANGUAGE = {
|
||||
'.svelte': 'html'
|
||||
};
|
||||
|
||||
interface ExplorerFileEntry {
|
||||
name: string;
|
||||
type: 'directory' | 'file';
|
||||
path: string;
|
||||
hasClaudeMd?: boolean;
|
||||
}
|
||||
|
||||
interface ExplorerDirectoryFilesResult {
|
||||
path?: string;
|
||||
files: ExplorerFileEntry[];
|
||||
gitignorePatterns?: string[];
|
||||
error?: string;
|
||||
}
|
||||
|
||||
interface ExplorerFileContentResult {
|
||||
error?: string;
|
||||
content?: string;
|
||||
language?: string;
|
||||
isMarkdown?: boolean;
|
||||
fileName?: string;
|
||||
path?: string;
|
||||
size?: number;
|
||||
lines?: number;
|
||||
}
|
||||
|
||||
interface UpdateClaudeMdResult {
|
||||
success?: boolean;
|
||||
error?: string;
|
||||
message?: string;
|
||||
output?: string;
|
||||
path?: string;
|
||||
}
|
||||
|
||||
// ========================================
|
||||
// Helper Functions
|
||||
// ========================================
|
||||
@@ -87,7 +110,7 @@ const EXT_TO_LANGUAGE = {
|
||||
* @param {string} gitignorePath - Path to .gitignore file
|
||||
* @returns {string[]} Array of gitignore patterns
|
||||
*/
|
||||
function parseGitignore(gitignorePath) {
|
||||
function parseGitignore(gitignorePath: string): string[] {
|
||||
try {
|
||||
if (!existsSync(gitignorePath)) return [];
|
||||
const content = readFileSync(gitignorePath, 'utf8');
|
||||
@@ -108,7 +131,7 @@ function parseGitignore(gitignorePath) {
|
||||
* @param {boolean} isDirectory - Whether the entry is a directory
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function shouldIgnore(name, patterns, isDirectory) {
|
||||
function shouldIgnore(name: string, patterns: string[], isDirectory: boolean): boolean {
|
||||
// Always exclude certain directories
|
||||
if (isDirectory && EXPLORER_EXCLUDE_DIRS.includes(name)) {
|
||||
return true;
|
||||
@@ -155,7 +178,7 @@ function shouldIgnore(name, patterns, isDirectory) {
|
||||
* @param {string} dirPath - Directory path to list
|
||||
* @returns {Promise<Object>}
|
||||
*/
|
||||
async function listDirectoryFiles(dirPath) {
|
||||
async function listDirectoryFiles(dirPath: string): Promise<ExplorerDirectoryFilesResult> {
|
||||
try {
|
||||
// Normalize path
|
||||
let normalizedPath = dirPath.replace(/\\/g, '/');
|
||||
@@ -178,7 +201,7 @@ async function listDirectoryFiles(dirPath) {
|
||||
// Read directory entries
|
||||
const entries = readdirSync(normalizedPath, { withFileTypes: true });
|
||||
|
||||
const files = [];
|
||||
const files: ExplorerFileEntry[] = [];
|
||||
for (const entry of entries) {
|
||||
const isDirectory = entry.isDirectory();
|
||||
|
||||
@@ -188,7 +211,7 @@ async function listDirectoryFiles(dirPath) {
|
||||
}
|
||||
|
||||
const entryPath = join(normalizedPath, entry.name);
|
||||
const fileInfo = {
|
||||
const fileInfo: ExplorerFileEntry = {
|
||||
name: entry.name,
|
||||
type: isDirectory ? 'directory' : 'file',
|
||||
path: entryPath.replace(/\\/g, '/')
|
||||
@@ -226,7 +249,7 @@ async function listDirectoryFiles(dirPath) {
|
||||
* @param {string} filePath - Path to file
|
||||
* @returns {Promise<Object>}
|
||||
*/
|
||||
async function getFileContent(filePath) {
|
||||
async function getFileContent(filePath: string): Promise<ExplorerFileContentResult> {
|
||||
try {
|
||||
// Normalize path
|
||||
let normalizedPath = filePath.replace(/\\/g, '/');
|
||||
@@ -251,9 +274,11 @@ async function getFileContent(filePath) {
|
||||
// Read file content
|
||||
const content = readFileSync(normalizedPath, 'utf8');
|
||||
const ext = normalizedPath.substring(normalizedPath.lastIndexOf('.')).toLowerCase();
|
||||
const language = EXT_TO_LANGUAGE[ext] || 'plaintext';
|
||||
const language = Object.prototype.hasOwnProperty.call(EXT_TO_LANGUAGE, ext)
|
||||
? EXT_TO_LANGUAGE[ext as keyof typeof EXT_TO_LANGUAGE]
|
||||
: 'plaintext';
|
||||
const isMarkdown = ext === '.md' || ext === '.markdown';
|
||||
const fileName = normalizedPath.split('/').pop();
|
||||
const fileName = normalizedPath.split('/').pop() ?? normalizedPath;
|
||||
|
||||
return {
|
||||
content,
|
||||
@@ -277,7 +302,7 @@ async function getFileContent(filePath) {
|
||||
* @param {string} strategy - Update strategy (single-layer, multi-layer)
|
||||
* @returns {Promise<Object>}
|
||||
*/
|
||||
async function triggerUpdateClaudeMd(targetPath, tool, strategy) {
|
||||
async function triggerUpdateClaudeMd(targetPath: string, tool: string, strategy: string): Promise<UpdateClaudeMdResult> {
|
||||
const { spawn } = await import('child_process');
|
||||
|
||||
// Normalize path
|
||||
@@ -303,7 +328,7 @@ async function triggerUpdateClaudeMd(targetPath, tool, strategy) {
|
||||
|
||||
console.log(`[Explorer] Running async: ccw tool exec update_module_claude with ${tool} (${strategy})`);
|
||||
|
||||
return new Promise((resolve) => {
|
||||
return new Promise<UpdateClaudeMdResult>((resolve) => {
|
||||
const isWindows = process.platform === 'win32';
|
||||
|
||||
// Spawn the process
|
||||
@@ -316,34 +341,39 @@ async function triggerUpdateClaudeMd(targetPath, tool, strategy) {
|
||||
let stdout = '';
|
||||
let stderr = '';
|
||||
|
||||
child.stdout.on('data', (data) => {
|
||||
child.stdout.on('data', (data: Buffer) => {
|
||||
stdout += data.toString();
|
||||
});
|
||||
|
||||
child.stderr.on('data', (data) => {
|
||||
child.stderr.on('data', (data: Buffer) => {
|
||||
stderr += data.toString();
|
||||
});
|
||||
|
||||
child.on('close', (code) => {
|
||||
child.on('close', (code: number | null) => {
|
||||
if (code === 0) {
|
||||
// Parse the JSON output from the tool
|
||||
let result;
|
||||
let result: unknown;
|
||||
try {
|
||||
result = JSON.parse(stdout);
|
||||
} catch {
|
||||
result = { output: stdout };
|
||||
}
|
||||
|
||||
if (result.success === false || result.error) {
|
||||
const parsed = typeof result === 'object' && result !== null ? (result as Record<string, unknown>) : null;
|
||||
const parsedSuccess = typeof parsed?.success === 'boolean' ? parsed.success : undefined;
|
||||
const parsedError = typeof parsed?.error === 'string' ? parsed.error : undefined;
|
||||
const parsedMessage = typeof parsed?.message === 'string' ? parsed.message : undefined;
|
||||
|
||||
if (parsedSuccess === false || parsedError) {
|
||||
resolve({
|
||||
success: false,
|
||||
error: result.error || result.message || 'Update failed',
|
||||
error: parsedError || parsedMessage || 'Update failed',
|
||||
output: stdout
|
||||
});
|
||||
} else {
|
||||
resolve({
|
||||
success: true,
|
||||
message: result.message || `CLAUDE.md updated successfully using ${tool} (${strategy})`,
|
||||
message: parsedMessage || `CLAUDE.md updated successfully using ${tool} (${strategy})`,
|
||||
output: stdout,
|
||||
path: normalizedPath
|
||||
});
|
||||
@@ -357,11 +387,11 @@ async function triggerUpdateClaudeMd(targetPath, tool, strategy) {
|
||||
}
|
||||
});
|
||||
|
||||
child.on('error', (error) => {
|
||||
child.on('error', (error: unknown) => {
|
||||
console.error('Error spawning process:', error);
|
||||
resolve({
|
||||
success: false,
|
||||
error: (error as Error).message,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
output: ''
|
||||
});
|
||||
});
|
||||
@@ -392,9 +422,19 @@ export async function handleFilesRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: List directory files with .gitignore filtering (Explorer view)
|
||||
if (pathname === '/api/files') {
|
||||
const dirPath = url.searchParams.get('path') || initialPath;
|
||||
const filesData = await listDirectoryFiles(dirPath);
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(filesData));
|
||||
|
||||
try {
|
||||
const validatedDir = await validateAllowedPath(dirPath, { mustExist: true, allowedDirectories: [initialPath] });
|
||||
const filesData = await listDirectoryFiles(validatedDir);
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(filesData));
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Files] Path validation failed: ${message}`);
|
||||
res.writeHead(status, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: status === 403 ? 'Access denied' : 'Invalid path', files: [] }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -406,20 +446,52 @@ export async function handleFilesRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
res.end(JSON.stringify({ error: 'File path is required' }));
|
||||
return true;
|
||||
}
|
||||
const fileData = await getFileContent(filePath);
|
||||
res.writeHead(fileData.error ? 404 : 200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(fileData));
|
||||
|
||||
try {
|
||||
const validatedFile = await validateAllowedPath(filePath, { mustExist: true, allowedDirectories: [initialPath] });
|
||||
const fileData = await getFileContent(validatedFile);
|
||||
res.writeHead(fileData.error ? 404 : 200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(fileData));
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Files] Path validation failed: ${message}`);
|
||||
res.writeHead(status, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: status === 403 ? 'Access denied' : 'Invalid path' }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: Update CLAUDE.md using CLI tools (Explorer view)
|
||||
if (pathname === '/api/update-claude-md' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { path: targetPath, tool = 'gemini', strategy = 'single-layer' } = body;
|
||||
if (!targetPath) {
|
||||
if (typeof body !== 'object' || body === null) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const {
|
||||
path: targetPath,
|
||||
tool = 'gemini',
|
||||
strategy = 'single-layer'
|
||||
} = body as { path?: unknown; tool?: unknown; strategy?: unknown };
|
||||
|
||||
if (typeof targetPath !== 'string' || targetPath.trim().length === 0) {
|
||||
return { error: 'path is required', status: 400 };
|
||||
}
|
||||
return await triggerUpdateClaudeMd(targetPath, tool, strategy);
|
||||
|
||||
try {
|
||||
const validatedPath = await validateAllowedPath(targetPath, { mustExist: true, allowedDirectories: [initialPath] });
|
||||
return await triggerUpdateClaudeMd(
|
||||
validatedPath,
|
||||
typeof tool === 'string' ? tool : 'gemini',
|
||||
typeof strategy === 'string' ? strategy : 'single-layer'
|
||||
);
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Files] Path validation failed: ${message}`);
|
||||
return { error: status === 403 ? 'Access denied' : 'Invalid path', status };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -2,21 +2,12 @@
|
||||
* Graph Routes Module
|
||||
* Handles graph visualization API endpoints for codex-lens data
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { homedir } from 'os';
|
||||
import { join, resolve, normalize } from 'path';
|
||||
import { existsSync, readdirSync } from 'fs';
|
||||
import Database from 'better-sqlite3';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
}
|
||||
import { validatePath as validateAllowedPath } from '../../utils/path-validator.js';
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
/**
|
||||
* PathMapper utility class (simplified from codex-lens Python implementation)
|
||||
@@ -82,28 +73,22 @@ interface ImpactAnalysis {
|
||||
* Validate and sanitize project path to prevent path traversal attacks
|
||||
* @returns sanitized absolute path or null if invalid
|
||||
*/
|
||||
function validateProjectPath(projectPath: string, initialPath: string): string | null {
|
||||
if (!projectPath) {
|
||||
return initialPath;
|
||||
type ProjectPathValidationResult =
|
||||
| { path: string; status: 200 }
|
||||
| { path: null; status: number; error: string };
|
||||
|
||||
async function validateProjectPath(projectPath: string, initialPath: string): Promise<ProjectPathValidationResult> {
|
||||
const candidate = projectPath || initialPath;
|
||||
|
||||
try {
|
||||
const validated = await validateAllowedPath(candidate, { mustExist: true, allowedDirectories: [initialPath] });
|
||||
return { path: validated, status: 200 };
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Graph] Project path validation failed: ${message}`);
|
||||
return { path: null, status, error: status === 403 ? 'Access denied' : 'Invalid project path' };
|
||||
}
|
||||
|
||||
// Resolve to absolute path
|
||||
const resolved = resolve(projectPath);
|
||||
const normalized = normalize(resolved);
|
||||
|
||||
// Check for path traversal attempts
|
||||
if (normalized.includes('..') || normalized !== resolved) {
|
||||
console.error(`[Graph] Path traversal attempt blocked: ${projectPath}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
// Ensure path exists and is a directory
|
||||
if (!existsSync(normalized)) {
|
||||
console.error(`[Graph] Path does not exist: ${normalized}`);
|
||||
return null;
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -440,18 +425,20 @@ export async function handleGraphRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Graph Nodes - Get all symbols as graph nodes
|
||||
if (pathname === '/api/graph/nodes') {
|
||||
const rawPath = url.searchParams.get('path') || initialPath;
|
||||
const projectPath = validateProjectPath(rawPath, initialPath);
|
||||
const projectPathResult = await validateProjectPath(rawPath, initialPath);
|
||||
const limitStr = url.searchParams.get('limit') || '1000';
|
||||
const limit = Math.min(parseInt(limitStr, 10) || 1000, 5000); // Max 5000 nodes
|
||||
const fileFilter = url.searchParams.get('file') || undefined;
|
||||
const moduleFilter = url.searchParams.get('module') || undefined;
|
||||
|
||||
if (!projectPath) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Invalid project path', nodes: [] }));
|
||||
if (projectPathResult.path === null) {
|
||||
res.writeHead(projectPathResult.status, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: projectPathResult.error, nodes: [] }));
|
||||
return true;
|
||||
}
|
||||
|
||||
const projectPath = projectPathResult.path;
|
||||
|
||||
try {
|
||||
const allNodes = await querySymbols(projectPath, fileFilter, moduleFilter);
|
||||
const nodes = allNodes.slice(0, limit);
|
||||
@@ -474,18 +461,20 @@ export async function handleGraphRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Graph Edges - Get all relationships as graph edges
|
||||
if (pathname === '/api/graph/edges') {
|
||||
const rawPath = url.searchParams.get('path') || initialPath;
|
||||
const projectPath = validateProjectPath(rawPath, initialPath);
|
||||
const projectPathResult = await validateProjectPath(rawPath, initialPath);
|
||||
const limitStr = url.searchParams.get('limit') || '2000';
|
||||
const limit = Math.min(parseInt(limitStr, 10) || 2000, 10000); // Max 10000 edges
|
||||
const fileFilter = url.searchParams.get('file') || undefined;
|
||||
const moduleFilter = url.searchParams.get('module') || undefined;
|
||||
|
||||
if (!projectPath) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Invalid project path', edges: [] }));
|
||||
if (projectPathResult.path === null) {
|
||||
res.writeHead(projectPathResult.status, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: projectPathResult.error, edges: [] }));
|
||||
return true;
|
||||
}
|
||||
|
||||
const projectPath = projectPathResult.path;
|
||||
|
||||
try {
|
||||
const allEdges = await queryRelationships(projectPath, fileFilter, moduleFilter);
|
||||
const edges = allEdges.slice(0, limit);
|
||||
@@ -508,14 +497,16 @@ export async function handleGraphRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Get available files and modules for filtering
|
||||
if (pathname === '/api/graph/files') {
|
||||
const rawPath = url.searchParams.get('path') || initialPath;
|
||||
const projectPath = validateProjectPath(rawPath, initialPath);
|
||||
const projectPathResult = await validateProjectPath(rawPath, initialPath);
|
||||
|
||||
if (!projectPath) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Invalid project path', files: [], modules: [] }));
|
||||
if (projectPathResult.path === null) {
|
||||
res.writeHead(projectPathResult.status, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: projectPathResult.error, files: [], modules: [] }));
|
||||
return true;
|
||||
}
|
||||
|
||||
const projectPath = projectPathResult.path;
|
||||
|
||||
try {
|
||||
const mapper = new PathMapper();
|
||||
const rootDbPath = mapper.sourceToIndexDb(projectPath);
|
||||
@@ -570,15 +561,17 @@ export async function handleGraphRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Impact Analysis - Get impact analysis for a symbol
|
||||
if (pathname === '/api/graph/impact') {
|
||||
const rawPath = url.searchParams.get('path') || initialPath;
|
||||
const projectPath = validateProjectPath(rawPath, initialPath);
|
||||
const projectPathResult = await validateProjectPath(rawPath, initialPath);
|
||||
const symbolId = url.searchParams.get('symbol');
|
||||
|
||||
if (!projectPath) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Invalid project path', directDependents: [], affectedFiles: [] }));
|
||||
if (projectPathResult.path === null) {
|
||||
res.writeHead(projectPathResult.status, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: projectPathResult.error, directDependents: [], affectedFiles: [] }));
|
||||
return true;
|
||||
}
|
||||
|
||||
const projectPath = projectPathResult.path;
|
||||
|
||||
if (!symbolId) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'symbol parameter is required', directDependents: [], affectedFiles: [] }));
|
||||
|
||||
@@ -1,22 +1,11 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* Help Routes Module
|
||||
* Handles all Help-related API endpoints for command guide and CodexLens docs
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { readFileSync, existsSync, watch } from 'fs';
|
||||
import { join } from 'path';
|
||||
import { homedir } from 'os';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
}
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
// ========== In-Memory Cache ==========
|
||||
interface CacheEntry {
|
||||
@@ -101,6 +90,7 @@ function initializeFileWatchers(): void {
|
||||
});
|
||||
|
||||
watchersInitialized = true;
|
||||
(watcher as any).unref?.();
|
||||
console.log(`File watchers initialized for: ${indexDir}`);
|
||||
} catch (error) {
|
||||
console.error('Failed to initialize file watchers:', error);
|
||||
|
||||
@@ -1,21 +1,14 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* Hooks Routes Module
|
||||
* Handles all hooks-related API endpoints
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';
|
||||
import { join, dirname } from 'path';
|
||||
import { homedir } from 'os';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
interface HooksRouteContext extends RouteContext {
|
||||
extractSessionIdFromPath: (filePath: string) => string | null;
|
||||
}
|
||||
|
||||
@@ -30,7 +23,7 @@ const GLOBAL_SETTINGS_PATH = join(homedir(), '.claude', 'settings.json');
|
||||
* @param {string} projectPath
|
||||
* @returns {string}
|
||||
*/
|
||||
function getProjectSettingsPath(projectPath) {
|
||||
function getProjectSettingsPath(projectPath: string): string {
|
||||
// path.join automatically handles cross-platform path separators
|
||||
return join(projectPath, '.claude', 'settings.json');
|
||||
}
|
||||
@@ -40,7 +33,7 @@ function getProjectSettingsPath(projectPath) {
|
||||
* @param {string} filePath
|
||||
* @returns {Object}
|
||||
*/
|
||||
function readSettingsFile(filePath) {
|
||||
function readSettingsFile(filePath: string): Record<string, unknown> {
|
||||
try {
|
||||
if (!existsSync(filePath)) {
|
||||
return {};
|
||||
@@ -58,7 +51,7 @@ function readSettingsFile(filePath) {
|
||||
* @param {string} projectPath
|
||||
* @returns {Object}
|
||||
*/
|
||||
function getHooksConfig(projectPath) {
|
||||
function getHooksConfig(projectPath: string): { global: { path: string; hooks: unknown }; project: { path: string | null; hooks: unknown } } {
|
||||
const globalSettings = readSettingsFile(GLOBAL_SETTINGS_PATH);
|
||||
const projectSettingsPath = projectPath ? getProjectSettingsPath(projectPath) : null;
|
||||
const projectSettings = projectSettingsPath ? readSettingsFile(projectSettingsPath) : {};
|
||||
@@ -66,11 +59,11 @@ function getHooksConfig(projectPath) {
|
||||
return {
|
||||
global: {
|
||||
path: GLOBAL_SETTINGS_PATH,
|
||||
hooks: globalSettings.hooks || {}
|
||||
hooks: (globalSettings as { hooks?: unknown }).hooks || {}
|
||||
},
|
||||
project: {
|
||||
path: projectSettingsPath,
|
||||
hooks: projectSettings.hooks || {}
|
||||
hooks: (projectSettings as { hooks?: unknown }).hooks || {}
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -83,15 +76,18 @@ function getHooksConfig(projectPath) {
|
||||
* @param {Object} hookData - Hook configuration
|
||||
* @returns {Object}
|
||||
*/
|
||||
function saveHookToSettings(projectPath, scope, event, hookData) {
|
||||
function saveHookToSettings(
|
||||
projectPath: string,
|
||||
scope: 'global' | 'project',
|
||||
event: string,
|
||||
hookData: Record<string, unknown> & { replaceIndex?: unknown }
|
||||
): Record<string, unknown> {
|
||||
try {
|
||||
const filePath = scope === 'global' ? GLOBAL_SETTINGS_PATH : getProjectSettingsPath(projectPath);
|
||||
const settings = readSettingsFile(filePath);
|
||||
const settings = readSettingsFile(filePath) as Record<string, unknown> & { hooks?: Record<string, unknown> };
|
||||
|
||||
// Ensure hooks object exists
|
||||
if (!settings.hooks) {
|
||||
settings.hooks = {};
|
||||
}
|
||||
settings.hooks = settings.hooks || {};
|
||||
|
||||
// Ensure the event array exists
|
||||
if (!settings.hooks[event]) {
|
||||
@@ -104,15 +100,16 @@ function saveHookToSettings(projectPath, scope, event, hookData) {
|
||||
}
|
||||
|
||||
// Check if we're replacing an existing hook
|
||||
if (hookData.replaceIndex !== undefined) {
|
||||
if (typeof hookData.replaceIndex === 'number') {
|
||||
const index = hookData.replaceIndex;
|
||||
delete hookData.replaceIndex;
|
||||
if (index >= 0 && index < settings.hooks[event].length) {
|
||||
settings.hooks[event][index] = hookData;
|
||||
const hooksForEvent = settings.hooks[event] as unknown[];
|
||||
if (index >= 0 && index < hooksForEvent.length) {
|
||||
hooksForEvent[index] = hookData;
|
||||
}
|
||||
} else {
|
||||
// Add new hook
|
||||
settings.hooks[event].push(hookData);
|
||||
(settings.hooks[event] as unknown[]).push(hookData);
|
||||
}
|
||||
|
||||
// Ensure directory exists and write file
|
||||
@@ -141,10 +138,15 @@ function saveHookToSettings(projectPath, scope, event, hookData) {
|
||||
* @param {number} hookIndex - Index of hook to delete
|
||||
* @returns {Object}
|
||||
*/
|
||||
function deleteHookFromSettings(projectPath, scope, event, hookIndex) {
|
||||
function deleteHookFromSettings(
|
||||
projectPath: string,
|
||||
scope: 'global' | 'project',
|
||||
event: string,
|
||||
hookIndex: number
|
||||
): Record<string, unknown> {
|
||||
try {
|
||||
const filePath = scope === 'global' ? GLOBAL_SETTINGS_PATH : getProjectSettingsPath(projectPath);
|
||||
const settings = readSettingsFile(filePath);
|
||||
const settings = readSettingsFile(filePath) as Record<string, unknown> & { hooks?: Record<string, unknown> };
|
||||
|
||||
if (!settings.hooks || !settings.hooks[event]) {
|
||||
return { error: 'Hook not found' };
|
||||
@@ -155,15 +157,17 @@ function deleteHookFromSettings(projectPath, scope, event, hookIndex) {
|
||||
settings.hooks[event] = [settings.hooks[event]];
|
||||
}
|
||||
|
||||
if (hookIndex < 0 || hookIndex >= settings.hooks[event].length) {
|
||||
const hooksForEvent = settings.hooks[event] as unknown[];
|
||||
|
||||
if (hookIndex < 0 || hookIndex >= hooksForEvent.length) {
|
||||
return { error: 'Invalid hook index' };
|
||||
}
|
||||
|
||||
// Remove the hook
|
||||
settings.hooks[event].splice(hookIndex, 1);
|
||||
hooksForEvent.splice(hookIndex, 1);
|
||||
|
||||
// Remove empty event arrays
|
||||
if (settings.hooks[event].length === 0) {
|
||||
if (hooksForEvent.length === 0) {
|
||||
delete settings.hooks[event];
|
||||
}
|
||||
|
||||
@@ -197,18 +201,29 @@ function deleteHookFromSettings(projectPath, scope, event, hookIndex) {
|
||||
* Handle hooks routes
|
||||
* @returns true if route was handled, false otherwise
|
||||
*/
|
||||
export async function handleHooksRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
export async function handleHooksRoutes(ctx: HooksRouteContext): Promise<boolean> {
|
||||
const { pathname, url, req, res, initialPath, handlePostRequest, broadcastToClients, extractSessionIdFromPath } = ctx;
|
||||
|
||||
// API: Hook endpoint for Claude Code notifications
|
||||
if (pathname === '/api/hook' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { type, filePath, sessionId, ...extraData } = body;
|
||||
if (typeof body !== 'object' || body === null) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const payload = body as Record<string, unknown>;
|
||||
const type = payload.type;
|
||||
const filePath = payload.filePath;
|
||||
const sessionId = payload.sessionId;
|
||||
const extraData: Record<string, unknown> = { ...payload };
|
||||
delete extraData.type;
|
||||
delete extraData.filePath;
|
||||
delete extraData.sessionId;
|
||||
|
||||
// Determine session ID from file path if not provided
|
||||
let resolvedSessionId = sessionId;
|
||||
if (!resolvedSessionId && filePath) {
|
||||
resolvedSessionId = extractSessionIdFromPath(filePath);
|
||||
let resolvedSessionId = typeof sessionId === 'string' ? sessionId : undefined;
|
||||
if (!resolvedSessionId && typeof filePath === 'string') {
|
||||
resolvedSessionId = extractSessionIdFromPath(filePath) ?? undefined;
|
||||
}
|
||||
|
||||
// Handle context hooks (session-start, context)
|
||||
@@ -226,7 +241,7 @@ export async function handleHooksRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
const index = await clusteringService.getProgressiveIndex({
|
||||
type: type as 'session-start' | 'context',
|
||||
sessionId: resolvedSessionId,
|
||||
prompt: extraData.prompt // Pass user prompt for intent matching
|
||||
prompt: typeof extraData.prompt === 'string' ? extraData.prompt : undefined // Pass user prompt for intent matching
|
||||
});
|
||||
|
||||
// Return context directly
|
||||
@@ -253,10 +268,10 @@ export async function handleHooksRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
|
||||
// Broadcast to all connected WebSocket clients
|
||||
const notification = {
|
||||
type: type || 'session_updated',
|
||||
type: typeof type === 'string' && type.trim().length > 0 ? type : 'session_updated',
|
||||
payload: {
|
||||
sessionId: resolvedSessionId,
|
||||
filePath: filePath,
|
||||
filePath: typeof filePath === 'string' ? filePath : undefined,
|
||||
timestamp: new Date().toISOString(),
|
||||
...extraData // Pass through toolName, status, result, params, error, etc.
|
||||
}
|
||||
@@ -365,7 +380,7 @@ export async function handleHooksRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Get hooks configuration
|
||||
if (pathname === '/api/hooks' && req.method === 'GET') {
|
||||
const projectPathParam = url.searchParams.get('path');
|
||||
const hooksData = getHooksConfig(projectPathParam);
|
||||
const hooksData = getHooksConfig(projectPathParam || initialPath);
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(hooksData));
|
||||
return true;
|
||||
@@ -374,11 +389,23 @@ export async function handleHooksRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Save hook
|
||||
if (pathname === '/api/hooks' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { projectPath, scope, event, hookData } = body;
|
||||
if (!scope || !event || !hookData) {
|
||||
if (typeof body !== 'object' || body === null) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const { projectPath, scope, event, hookData } = body as {
|
||||
projectPath?: unknown;
|
||||
scope?: unknown;
|
||||
event?: unknown;
|
||||
hookData?: unknown;
|
||||
};
|
||||
|
||||
if ((scope !== 'global' && scope !== 'project') || typeof event !== 'string' || typeof hookData !== 'object' || hookData === null) {
|
||||
return { error: 'scope, event, and hookData are required', status: 400 };
|
||||
}
|
||||
return saveHookToSettings(projectPath, scope, event, hookData);
|
||||
|
||||
const resolvedProjectPath = typeof projectPath === 'string' && projectPath.trim().length > 0 ? projectPath : initialPath;
|
||||
return saveHookToSettings(resolvedProjectPath, scope, event, hookData as Record<string, unknown>);
|
||||
});
|
||||
return true;
|
||||
}
|
||||
@@ -386,11 +413,23 @@ export async function handleHooksRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Delete hook
|
||||
if (pathname === '/api/hooks' && req.method === 'DELETE') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { projectPath, scope, event, hookIndex } = body;
|
||||
if (!scope || !event || hookIndex === undefined) {
|
||||
if (typeof body !== 'object' || body === null) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const { projectPath, scope, event, hookIndex } = body as {
|
||||
projectPath?: unknown;
|
||||
scope?: unknown;
|
||||
event?: unknown;
|
||||
hookIndex?: unknown;
|
||||
};
|
||||
|
||||
if ((scope !== 'global' && scope !== 'project') || typeof event !== 'string' || typeof hookIndex !== 'number') {
|
||||
return { error: 'scope, event, and hookIndex are required', status: 400 };
|
||||
}
|
||||
return deleteHookFromSettings(projectPath, scope, event, hookIndex);
|
||||
|
||||
const resolvedProjectPath = typeof projectPath === 'string' && projectPath.trim().length > 0 ? projectPath : initialPath;
|
||||
return deleteHookFromSettings(resolvedProjectPath, scope, event, hookIndex);
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* Issue Routes Module (Optimized - Flat JSONL Storage)
|
||||
*
|
||||
@@ -23,19 +22,9 @@
|
||||
* - GET /api/queue - Get execution queue
|
||||
* - POST /api/queue/reorder - Reorder queue items
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { readFileSync, existsSync, writeFileSync, mkdirSync, unlinkSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
}
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
// ========== JSONL Helper Functions ==========
|
||||
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* LiteLLM API Routes Module
|
||||
* Handles LiteLLM provider management, endpoint configuration, and cache management
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname, join as pathJoin } from 'path';
|
||||
import { getSystemPython } from '../../utils/python-utils.js';
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
// Get current module path for package-relative lookups
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
@@ -66,14 +65,20 @@ export function clearCcwLitellmStatusCache() {
|
||||
ccwLitellmStatusCache.timestamp = 0;
|
||||
}
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
function sanitizeProviderForResponse(provider: any): any {
|
||||
if (!provider) return provider;
|
||||
return {
|
||||
...provider,
|
||||
apiKey: '***',
|
||||
apiKeys: Array.isArray(provider.apiKeys)
|
||||
? provider.apiKeys.map((entry: any) => ({ ...entry, key: '***' }))
|
||||
: provider.apiKeys,
|
||||
};
|
||||
}
|
||||
|
||||
function sanitizeRotationEndpointForResponse(endpoint: any): any {
|
||||
if (!endpoint) return endpoint;
|
||||
return { ...endpoint, api_key: '***' };
|
||||
}
|
||||
|
||||
// ===========================
|
||||
@@ -83,11 +88,11 @@ export interface RouteContext {
|
||||
interface ModelInfo {
|
||||
id: string;
|
||||
name: string;
|
||||
provider: ProviderType;
|
||||
provider: string;
|
||||
description?: string;
|
||||
}
|
||||
|
||||
const PROVIDER_MODELS: Record<ProviderType, ModelInfo[]> = {
|
||||
const PROVIDER_MODELS: Record<string, ModelInfo[]> = {
|
||||
openai: [
|
||||
{ id: 'gpt-4-turbo', name: 'GPT-4 Turbo', provider: 'openai', description: '128K context' },
|
||||
{ id: 'gpt-4', name: 'GPT-4', provider: 'openai', description: '8K context' },
|
||||
@@ -132,7 +137,7 @@ export async function handleLiteLLMApiRoutes(ctx: RouteContext): Promise<boolean
|
||||
// GET /api/litellm-api/providers - List all providers
|
||||
if (pathname === '/api/litellm-api/providers' && req.method === 'GET') {
|
||||
try {
|
||||
const providers = getAllProviders(initialPath);
|
||||
const providers = getAllProviders(initialPath).map(sanitizeProviderForResponse);
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ providers, count: providers.length }));
|
||||
} catch (err) {
|
||||
@@ -153,13 +158,14 @@ export async function handleLiteLLMApiRoutes(ctx: RouteContext): Promise<boolean
|
||||
|
||||
try {
|
||||
const provider = addProvider(initialPath, providerData);
|
||||
const sanitizedProvider = sanitizeProviderForResponse(provider);
|
||||
|
||||
broadcastToClients({
|
||||
type: 'LITELLM_PROVIDER_CREATED',
|
||||
payload: { provider, timestamp: new Date().toISOString() }
|
||||
payload: { provider: sanitizedProvider, timestamp: new Date().toISOString() }
|
||||
});
|
||||
|
||||
return { success: true, provider };
|
||||
return { success: true, provider: sanitizedProvider };
|
||||
} catch (err) {
|
||||
return { error: (err as Error).message, status: 500 };
|
||||
}
|
||||
@@ -181,7 +187,7 @@ export async function handleLiteLLMApiRoutes(ctx: RouteContext): Promise<boolean
|
||||
}
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(provider));
|
||||
res.end(JSON.stringify(sanitizeProviderForResponse(provider)));
|
||||
} catch (err) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: (err as Error).message }));
|
||||
@@ -199,13 +205,14 @@ export async function handleLiteLLMApiRoutes(ctx: RouteContext): Promise<boolean
|
||||
|
||||
try {
|
||||
const provider = updateProvider(initialPath, providerId, updates);
|
||||
const sanitizedProvider = sanitizeProviderForResponse(provider);
|
||||
|
||||
broadcastToClients({
|
||||
type: 'LITELLM_PROVIDER_UPDATED',
|
||||
payload: { provider, timestamp: new Date().toISOString() }
|
||||
payload: { provider: sanitizedProvider, timestamp: new Date().toISOString() }
|
||||
});
|
||||
|
||||
return { success: true, provider };
|
||||
return { success: true, provider: sanitizedProvider };
|
||||
} catch (err) {
|
||||
return { error: (err as Error).message, status: 404 };
|
||||
}
|
||||
@@ -397,7 +404,7 @@ export async function handleLiteLLMApiRoutes(ctx: RouteContext): Promise<boolean
|
||||
// GET /api/litellm-api/models/:providerType - Get available models for provider type
|
||||
const modelsMatch = pathname.match(/^\/api\/litellm-api\/models\/([^/]+)$/);
|
||||
if (modelsMatch && req.method === 'GET') {
|
||||
const providerType = modelsMatch[1] as ProviderType;
|
||||
const providerType = modelsMatch[1];
|
||||
|
||||
try {
|
||||
const models = PROVIDER_MODELS[providerType];
|
||||
@@ -589,7 +596,6 @@ export async function handleLiteLLMApiRoutes(ctx: RouteContext): Promise<boolean
|
||||
const { stdout } = await execAsync('pip show ccw-litellm', {
|
||||
timeout: 10000,
|
||||
windowsHide: true,
|
||||
shell: true,
|
||||
});
|
||||
// Parse version from pip show output
|
||||
const versionMatch = stdout.match(/Version:\s*(.+)/i);
|
||||
@@ -608,7 +614,6 @@ export async function handleLiteLLMApiRoutes(ctx: RouteContext): Promise<boolean
|
||||
const { stdout } = await execAsync(`${pythonExe} -c "import ccw_litellm; print(ccw_litellm.__version__)"`, {
|
||||
timeout: 5000,
|
||||
windowsHide: true,
|
||||
shell: true,
|
||||
});
|
||||
const version = stdout.trim();
|
||||
if (version) {
|
||||
@@ -687,11 +692,12 @@ export async function handleLiteLLMApiRoutes(ctx: RouteContext): Promise<boolean
|
||||
if (pathname === '/api/litellm-api/codexlens/rotation/endpoints' && req.method === 'GET') {
|
||||
try {
|
||||
const endpoints = generateRotationEndpoints(initialPath);
|
||||
const sanitizedEndpoints = endpoints.map(sanitizeRotationEndpointForResponse);
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
endpoints,
|
||||
count: endpoints.length,
|
||||
endpoints: sanitizedEndpoints,
|
||||
count: sanitizedEndpoints.length,
|
||||
}));
|
||||
} catch (err) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
|
||||
@@ -1,19 +1,18 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* LiteLLM Routes Module
|
||||
* Handles all LiteLLM-related API endpoints
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import type { ChatMessage } from '../../tools/litellm-client.js';
|
||||
import { getLiteLLMClient, getLiteLLMStatus, checkLiteLLMAvailable } from '../../tools/litellm-client.js';
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
function isChatMessage(value: unknown): value is ChatMessage {
|
||||
if (typeof value !== 'object' || value === null) return false;
|
||||
const candidate = value as Record<string, unknown>;
|
||||
const role = candidate.role;
|
||||
const content = candidate.content;
|
||||
if (role !== 'system' && role !== 'user' && role !== 'assistant') return false;
|
||||
return typeof content === 'string';
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -29,9 +28,9 @@ export async function handleLiteLLMRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
const status = await getLiteLLMStatus();
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(status));
|
||||
} catch (err) {
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ available: false, error: err.message }));
|
||||
res.end(JSON.stringify({ available: false, error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@@ -43,9 +42,9 @@ export async function handleLiteLLMRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
const config = await client.getConfig();
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(config));
|
||||
} catch (err) {
|
||||
} catch (err: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err.message }));
|
||||
res.end(JSON.stringify({ error: err instanceof Error ? err.message : String(err) }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
@@ -53,9 +52,13 @@ export async function handleLiteLLMRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: LiteLLM Embed - Generate embeddings
|
||||
if (pathname === '/api/litellm/embed' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { texts, model = 'default' } = body;
|
||||
if (typeof body !== 'object' || body === null) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
if (!texts || !Array.isArray(texts)) {
|
||||
const { texts, model = 'default' } = body as { texts?: unknown; model?: unknown };
|
||||
|
||||
if (!Array.isArray(texts) || texts.some((t) => typeof t !== 'string')) {
|
||||
return { error: 'texts array is required', status: 400 };
|
||||
}
|
||||
|
||||
@@ -65,10 +68,10 @@ export async function handleLiteLLMRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
|
||||
try {
|
||||
const client = getLiteLLMClient();
|
||||
const result = await client.embed(texts, model);
|
||||
const result = await client.embed(texts, typeof model === 'string' ? model : 'default');
|
||||
return { success: true, ...result };
|
||||
} catch (err) {
|
||||
return { error: err.message, status: 500 };
|
||||
} catch (err: unknown) {
|
||||
return { error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
@@ -77,27 +80,32 @@ export async function handleLiteLLMRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: LiteLLM Chat - Chat with LLM
|
||||
if (pathname === '/api/litellm/chat' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { message, messages, model = 'default' } = body;
|
||||
if (typeof body !== 'object' || body === null) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const { message, messages, model = 'default' } = body as { message?: unknown; messages?: unknown; model?: unknown };
|
||||
|
||||
// Support both single message and messages array
|
||||
if (!message && (!messages || !Array.isArray(messages))) {
|
||||
if (typeof message !== 'string' && (!Array.isArray(messages) || !messages.every(isChatMessage))) {
|
||||
return { error: 'message or messages array is required', status: 400 };
|
||||
}
|
||||
|
||||
try {
|
||||
const client = getLiteLLMClient();
|
||||
|
||||
if (messages && Array.isArray(messages)) {
|
||||
if (Array.isArray(messages) && messages.every(isChatMessage)) {
|
||||
// Multi-turn chat
|
||||
const result = await client.chatMessages(messages, model);
|
||||
const result = await client.chatMessages(messages, typeof model === 'string' ? model : 'default');
|
||||
return { success: true, ...result };
|
||||
} else {
|
||||
// Single message chat
|
||||
const content = await client.chat(message, model);
|
||||
return { success: true, content, model };
|
||||
const resolvedModel = typeof model === 'string' ? model : 'default';
|
||||
const content = await client.chat(message as string, resolvedModel);
|
||||
return { success: true, content, model: resolvedModel };
|
||||
}
|
||||
} catch (err) {
|
||||
return { error: err.message, status: 500 };
|
||||
} catch (err: unknown) {
|
||||
return { error: err instanceof Error ? err.message : String(err), status: 500 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* MCP Routes Module
|
||||
* Handles all MCP-related API endpoints
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { readFileSync, writeFileSync, existsSync, mkdirSync, readdirSync, statSync } from 'fs';
|
||||
import { join, dirname } from 'path';
|
||||
import { homedir } from 'os';
|
||||
import * as McpTemplatesDb from './mcp-templates-db.js';
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
// Claude config file path
|
||||
const CLAUDE_CONFIG_PATH = join(homedir(), '.claude.json');
|
||||
@@ -432,16 +431,6 @@ function toggleCodexMcpServer(serverName: string, enabled: boolean): { success?:
|
||||
}
|
||||
}
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
}
|
||||
|
||||
// ========================================
|
||||
// Helper Functions
|
||||
// ========================================
|
||||
@@ -464,7 +453,7 @@ function getEnterpriseMcpPath(): string {
|
||||
/**
|
||||
* Safely read and parse JSON file
|
||||
*/
|
||||
function safeReadJson(filePath) {
|
||||
function safeReadJson(filePath: string): any | null {
|
||||
try {
|
||||
if (!existsSync(filePath)) return null;
|
||||
const content = readFileSync(filePath, 'utf8');
|
||||
@@ -479,8 +468,8 @@ function safeReadJson(filePath) {
|
||||
* @param {string} filePath
|
||||
* @returns {Object} mcpServers object or empty object
|
||||
*/
|
||||
function getMcpServersFromFile(filePath) {
|
||||
const config = safeReadJson(filePath);
|
||||
function getMcpServersFromFile(filePath: string): Record<string, unknown> {
|
||||
const config = safeReadJson(filePath) as { mcpServers?: Record<string, unknown> } | null;
|
||||
if (!config) return {};
|
||||
return config.mcpServers || {};
|
||||
}
|
||||
@@ -492,7 +481,7 @@ function getMcpServersFromFile(filePath) {
|
||||
* @param {Object} serverConfig - MCP server configuration
|
||||
* @returns {Object} Result with success/error
|
||||
*/
|
||||
function addMcpServerToMcpJson(projectPath, serverName, serverConfig) {
|
||||
function addMcpServerToMcpJson(projectPath: string, serverName: string, serverConfig: unknown) {
|
||||
try {
|
||||
const normalizedPath = normalizePathForFileSystem(projectPath);
|
||||
const mcpJsonPath = join(normalizedPath, '.mcp.json');
|
||||
@@ -530,7 +519,7 @@ function addMcpServerToMcpJson(projectPath, serverName, serverConfig) {
|
||||
* @param {string} serverName - MCP server name
|
||||
* @returns {Object} Result with success/error
|
||||
*/
|
||||
function removeMcpServerFromMcpJson(projectPath, serverName) {
|
||||
function removeMcpServerFromMcpJson(projectPath: string, serverName: string) {
|
||||
try {
|
||||
const normalizedPath = normalizePathForFileSystem(projectPath);
|
||||
const mcpJsonPath = join(normalizedPath, '.mcp.json');
|
||||
@@ -562,6 +551,26 @@ function removeMcpServerFromMcpJson(projectPath, serverName) {
|
||||
}
|
||||
}
|
||||
|
||||
type McpServerConfig = Record<string, unknown>;
|
||||
type McpServers = Record<string, McpServerConfig>;
|
||||
type ProjectConfig = {
|
||||
mcpServers?: McpServers;
|
||||
mcpJsonPath?: string;
|
||||
hasMcpJson?: boolean;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
type ProjectsConfig = Record<string, ProjectConfig>;
|
||||
type ConfigSource = { type: string; path: string; count: number };
|
||||
|
||||
interface McpConfig {
|
||||
projects: ProjectsConfig;
|
||||
userServers: McpServers;
|
||||
enterpriseServers: McpServers;
|
||||
globalServers: McpServers;
|
||||
configSources: ConfigSource[];
|
||||
error?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get MCP configuration from multiple sources (per official Claude Code docs):
|
||||
*
|
||||
@@ -575,12 +584,13 @@ function removeMcpServerFromMcpJson(projectPath, serverName) {
|
||||
*
|
||||
* @returns {Object}
|
||||
*/
|
||||
function getMcpConfig() {
|
||||
function getMcpConfig(): McpConfig {
|
||||
try {
|
||||
const result = {
|
||||
const result: McpConfig = {
|
||||
projects: {},
|
||||
userServers: {}, // User-level servers from ~/.claude.json mcpServers
|
||||
enterpriseServers: {}, // Enterprise managed servers (highest priority)
|
||||
globalServers: {}, // Merged user + enterprise
|
||||
configSources: [] // Track where configs came from for debugging
|
||||
};
|
||||
|
||||
@@ -650,7 +660,14 @@ function getMcpConfig() {
|
||||
return result;
|
||||
} catch (error: unknown) {
|
||||
console.error('Error reading MCP config:', error);
|
||||
return { projects: {}, globalServers: {}, userServers: {}, enterpriseServers: {}, configSources: [], error: (error as Error).message };
|
||||
return {
|
||||
projects: {},
|
||||
globalServers: {},
|
||||
userServers: {},
|
||||
enterpriseServers: {},
|
||||
configSources: [],
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -660,7 +677,7 @@ function getMcpConfig() {
|
||||
* @param {string} path
|
||||
* @returns {string}
|
||||
*/
|
||||
function normalizePathForFileSystem(path) {
|
||||
function normalizePathForFileSystem(path: string): string {
|
||||
let normalized = path.replace(/\\/g, '/');
|
||||
|
||||
// Handle /d/path format -> D:/path
|
||||
@@ -678,7 +695,7 @@ function normalizePathForFileSystem(path) {
|
||||
* @param {Object} claudeConfig - Optional existing config to check format
|
||||
* @returns {string}
|
||||
*/
|
||||
function normalizeProjectPathForConfig(path, claudeConfig = null) {
|
||||
function normalizeProjectPathForConfig(path: string, claudeConfig: unknown = null): string {
|
||||
// IMPORTANT: Always normalize to forward slashes to prevent duplicate entries
|
||||
// (e.g., prevents both "D:/Claude_dms3" and "D:\\Claude_dms3")
|
||||
let normalizedForward = path.replace(/\\/g, '/');
|
||||
@@ -699,7 +716,7 @@ function normalizeProjectPathForConfig(path, claudeConfig = null) {
|
||||
* @param {boolean} enable
|
||||
* @returns {Object}
|
||||
*/
|
||||
function toggleMcpServerEnabled(projectPath, serverName, enable) {
|
||||
function toggleMcpServerEnabled(projectPath: string, serverName: string, enable: boolean) {
|
||||
try {
|
||||
if (!existsSync(CLAUDE_CONFIG_PATH)) {
|
||||
return { error: '.claude.json not found' };
|
||||
@@ -723,7 +740,7 @@ function toggleMcpServerEnabled(projectPath, serverName, enable) {
|
||||
|
||||
if (enable) {
|
||||
// Remove from disabled list
|
||||
projectConfig.disabledMcpServers = projectConfig.disabledMcpServers.filter(s => s !== serverName);
|
||||
projectConfig.disabledMcpServers = projectConfig.disabledMcpServers.filter((s: string) => s !== serverName);
|
||||
} else {
|
||||
// Add to disabled list if not already there
|
||||
if (!projectConfig.disabledMcpServers.includes(serverName)) {
|
||||
@@ -755,7 +772,7 @@ function toggleMcpServerEnabled(projectPath, serverName, enable) {
|
||||
* @param {boolean} useLegacyConfig - If true, use .claude.json instead of .mcp.json
|
||||
* @returns {Object}
|
||||
*/
|
||||
function addMcpServerToProject(projectPath, serverName, serverConfig, useLegacyConfig = false) {
|
||||
function addMcpServerToProject(projectPath: string, serverName: string, serverConfig: unknown, useLegacyConfig: boolean = false) {
|
||||
try {
|
||||
// Default: Use .mcp.json for project-level MCP servers
|
||||
if (!useLegacyConfig) {
|
||||
@@ -823,7 +840,7 @@ function addMcpServerToProject(projectPath, serverName, serverConfig, useLegacyC
|
||||
* @param {string} serverName
|
||||
* @returns {Object}
|
||||
*/
|
||||
function removeMcpServerFromProject(projectPath, serverName) {
|
||||
function removeMcpServerFromProject(projectPath: string, serverName: string) {
|
||||
try {
|
||||
const normalizedPathForFile = normalizePathForFileSystem(projectPath);
|
||||
const mcpJsonPath = join(normalizedPathForFile, '.mcp.json');
|
||||
@@ -859,7 +876,7 @@ function removeMcpServerFromProject(projectPath, serverName) {
|
||||
|
||||
// Also remove from disabled list if present
|
||||
if (projectConfig.disabledMcpServers) {
|
||||
projectConfig.disabledMcpServers = projectConfig.disabledMcpServers.filter(s => s !== serverName);
|
||||
projectConfig.disabledMcpServers = projectConfig.disabledMcpServers.filter((s: string) => s !== serverName);
|
||||
}
|
||||
|
||||
// Write back to file
|
||||
@@ -894,7 +911,7 @@ function removeMcpServerFromProject(projectPath, serverName) {
|
||||
* @param {Object} serverConfig
|
||||
* @returns {Object}
|
||||
*/
|
||||
function addGlobalMcpServer(serverName, serverConfig) {
|
||||
function addGlobalMcpServer(serverName: string, serverConfig: unknown) {
|
||||
try {
|
||||
if (!existsSync(CLAUDE_CONFIG_PATH)) {
|
||||
return { error: '.claude.json not found' };
|
||||
@@ -931,7 +948,7 @@ function addGlobalMcpServer(serverName, serverConfig) {
|
||||
* @param {string} serverName
|
||||
* @returns {Object}
|
||||
*/
|
||||
function removeGlobalMcpServer(serverName) {
|
||||
function removeGlobalMcpServer(serverName: string) {
|
||||
try {
|
||||
if (!existsSync(CLAUDE_CONFIG_PATH)) {
|
||||
return { error: '.claude.json not found' };
|
||||
@@ -967,7 +984,7 @@ function removeGlobalMcpServer(serverName) {
|
||||
* @param {string} filePath
|
||||
* @returns {Object}
|
||||
*/
|
||||
function readSettingsFile(filePath) {
|
||||
function readSettingsFile(filePath: string) {
|
||||
try {
|
||||
if (!existsSync(filePath)) {
|
||||
return {};
|
||||
@@ -985,7 +1002,7 @@ function readSettingsFile(filePath) {
|
||||
* @param {string} filePath
|
||||
* @param {Object} settings
|
||||
*/
|
||||
function writeSettingsFile(filePath, settings) {
|
||||
function writeSettingsFile(filePath: string, settings: any) {
|
||||
const dirPath = dirname(filePath);
|
||||
// Ensure directory exists
|
||||
if (!existsSync(dirPath)) {
|
||||
@@ -999,7 +1016,7 @@ function writeSettingsFile(filePath, settings) {
|
||||
* @param {string} projectPath
|
||||
* @returns {string}
|
||||
*/
|
||||
function getProjectSettingsPath(projectPath) {
|
||||
function getProjectSettingsPath(projectPath: string): string {
|
||||
// path.join automatically handles cross-platform path separators
|
||||
return join(projectPath, '.claude', 'settings.json');
|
||||
}
|
||||
@@ -1008,6 +1025,10 @@ function getProjectSettingsPath(projectPath) {
|
||||
// Route Handlers
|
||||
// ========================================
|
||||
|
||||
function isRecord(value: unknown): value is Record<string, unknown> {
|
||||
return typeof value === 'object' && value !== null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle MCP routes
|
||||
* @returns true if route was handled, false otherwise
|
||||
@@ -1043,11 +1064,22 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Add Codex MCP server
|
||||
if (pathname === '/api/codex-mcp-add' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { serverName, serverConfig } = body;
|
||||
if (!serverName || !serverConfig) {
|
||||
return { error: 'serverName and serverConfig are required', status: 400 };
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
return addCodexMcpServer(serverName, serverConfig);
|
||||
|
||||
const serverName = body.serverName;
|
||||
const serverConfig = body.serverConfig;
|
||||
|
||||
if (typeof serverName !== 'string' || !serverName.trim()) {
|
||||
return { error: 'serverName is required', status: 400 };
|
||||
}
|
||||
|
||||
if (!isRecord(serverConfig)) {
|
||||
return { error: 'serverConfig is required', status: 400 };
|
||||
}
|
||||
|
||||
return addCodexMcpServer(serverName, serverConfig as Record<string, any>);
|
||||
});
|
||||
return true;
|
||||
}
|
||||
@@ -1055,8 +1087,12 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Remove Codex MCP server
|
||||
if (pathname === '/api/codex-mcp-remove' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { serverName } = body;
|
||||
if (!serverName) {
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const serverName = body.serverName;
|
||||
if (typeof serverName !== 'string' || !serverName.trim()) {
|
||||
return { error: 'serverName is required', status: 400 };
|
||||
}
|
||||
return removeCodexMcpServer(serverName);
|
||||
@@ -1067,8 +1103,14 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Toggle Codex MCP server enabled state
|
||||
if (pathname === '/api/codex-mcp-toggle' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { serverName, enabled } = body;
|
||||
if (!serverName || enabled === undefined) {
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const serverName = body.serverName;
|
||||
const enabled = body.enabled;
|
||||
|
||||
if (typeof serverName !== 'string' || !serverName.trim() || typeof enabled !== 'boolean') {
|
||||
return { error: 'serverName and enabled are required', status: 400 };
|
||||
}
|
||||
return toggleCodexMcpServer(serverName, enabled);
|
||||
@@ -1079,9 +1121,16 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Toggle MCP server enabled/disabled
|
||||
if (pathname === '/api/mcp-toggle' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { projectPath, serverName, enable } = body;
|
||||
if (!projectPath || !serverName) {
|
||||
return { error: 'projectPath and serverName are required', status: 400 };
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const projectPath = body.projectPath;
|
||||
const serverName = body.serverName;
|
||||
const enable = body.enable;
|
||||
|
||||
if (typeof projectPath !== 'string' || !projectPath.trim() || typeof serverName !== 'string' || !serverName.trim() || typeof enable !== 'boolean') {
|
||||
return { error: 'projectPath, serverName, and enable are required', status: 400 };
|
||||
}
|
||||
return toggleMcpServerEnabled(projectPath, serverName, enable);
|
||||
});
|
||||
@@ -1091,8 +1140,16 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Copy MCP server to project
|
||||
if (pathname === '/api/mcp-copy-server' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { projectPath, serverName, serverConfig, configType } = body;
|
||||
if (!projectPath || !serverName || !serverConfig) {
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const projectPath = body.projectPath;
|
||||
const serverName = body.serverName;
|
||||
const serverConfig = body.serverConfig;
|
||||
const configType = body.configType;
|
||||
|
||||
if (typeof projectPath !== 'string' || !projectPath.trim() || typeof serverName !== 'string' || !serverName.trim() || serverConfig === undefined || serverConfig === null) {
|
||||
return { error: 'projectPath, serverName, and serverConfig are required', status: 400 };
|
||||
}
|
||||
// configType: 'mcp' = use .mcp.json (default), 'claude' = use .claude.json
|
||||
@@ -1105,8 +1162,12 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Install CCW MCP server to project
|
||||
if (pathname === '/api/mcp-install-ccw' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { projectPath } = body;
|
||||
if (!projectPath) {
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const projectPath = body.projectPath;
|
||||
if (typeof projectPath !== 'string' || !projectPath.trim()) {
|
||||
return { error: 'projectPath is required', status: 400 };
|
||||
}
|
||||
|
||||
@@ -1129,8 +1190,13 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Remove MCP server from project
|
||||
if (pathname === '/api/mcp-remove-server' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { projectPath, serverName } = body;
|
||||
if (!projectPath || !serverName) {
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const projectPath = body.projectPath;
|
||||
const serverName = body.serverName;
|
||||
if (typeof projectPath !== 'string' || !projectPath.trim() || typeof serverName !== 'string' || !serverName.trim()) {
|
||||
return { error: 'projectPath and serverName are required', status: 400 };
|
||||
}
|
||||
return removeMcpServerFromProject(projectPath, serverName);
|
||||
@@ -1141,8 +1207,13 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Add MCP server to global scope (top-level mcpServers in ~/.claude.json)
|
||||
if (pathname === '/api/mcp-add-global-server' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { serverName, serverConfig } = body;
|
||||
if (!serverName || !serverConfig) {
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const serverName = body.serverName;
|
||||
const serverConfig = body.serverConfig;
|
||||
if (typeof serverName !== 'string' || !serverName.trim() || serverConfig === undefined || serverConfig === null) {
|
||||
return { error: 'serverName and serverConfig are required', status: 400 };
|
||||
}
|
||||
return addGlobalMcpServer(serverName, serverConfig);
|
||||
@@ -1153,8 +1224,12 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Remove MCP server from global scope
|
||||
if (pathname === '/api/mcp-remove-global-server' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { serverName } = body;
|
||||
if (!serverName) {
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const serverName = body.serverName;
|
||||
if (typeof serverName !== 'string' || !serverName.trim()) {
|
||||
return { error: 'serverName is required', status: 400 };
|
||||
}
|
||||
return removeGlobalMcpServer(serverName);
|
||||
@@ -1177,14 +1252,29 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Save MCP template
|
||||
if (pathname === '/api/mcp-templates' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { name, description, serverConfig, tags, category } = body;
|
||||
if (!name || !serverConfig) {
|
||||
return { error: 'name and serverConfig are required', status: 400 };
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const name = body.name;
|
||||
const serverConfig = body.serverConfig;
|
||||
|
||||
if (typeof name !== 'string' || !name.trim()) {
|
||||
return { error: 'name is required', status: 400 };
|
||||
}
|
||||
|
||||
if (!isRecord(serverConfig) || typeof serverConfig.command !== 'string') {
|
||||
return { error: 'serverConfig with command is required', status: 400 };
|
||||
}
|
||||
|
||||
const description = typeof body.description === 'string' ? body.description : undefined;
|
||||
const tags = Array.isArray(body.tags) ? body.tags.filter((tag): tag is string => typeof tag === 'string') : undefined;
|
||||
const category = typeof body.category === 'string' ? body.category : undefined;
|
||||
|
||||
return McpTemplatesDb.saveTemplate({
|
||||
name,
|
||||
description,
|
||||
serverConfig,
|
||||
serverConfig: serverConfig as McpTemplatesDb.McpTemplate['serverConfig'],
|
||||
tags,
|
||||
category
|
||||
});
|
||||
@@ -1244,8 +1334,15 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Install template to project or global
|
||||
if (pathname === '/api/mcp-templates/install' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { templateName, projectPath, scope } = body;
|
||||
if (!templateName) {
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const templateName = body.templateName;
|
||||
const projectPath = body.projectPath;
|
||||
const scope = body.scope;
|
||||
|
||||
if (typeof templateName !== 'string' || !templateName.trim()) {
|
||||
return { error: 'templateName is required', status: 400 };
|
||||
}
|
||||
|
||||
@@ -1258,7 +1355,7 @@ export async function handleMcpRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
if (scope === 'global') {
|
||||
return addGlobalMcpServer(templateName, template.serverConfig);
|
||||
} else {
|
||||
if (!projectPath) {
|
||||
if (typeof projectPath !== 'string' || !projectPath.trim()) {
|
||||
return { error: 'projectPath is required for project scope', status: 400 };
|
||||
}
|
||||
return addMcpServerToProject(projectPath, templateName, template.serverConfig);
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* MCP Routes Module
|
||||
* Handles all MCP-related API endpoints
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* MCP Templates Database Module
|
||||
* Stores MCP server configurations as reusable templates
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* Navigation Status Routes Module
|
||||
* Aggregated status endpoint for navigation bar badge updates
|
||||
@@ -6,18 +5,10 @@
|
||||
* API Endpoints:
|
||||
* - GET /api/nav-status - Get aggregated navigation bar status (counts for all badges)
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { existsSync, readFileSync, readdirSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
import { homedir } from 'os';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
}
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
// ========== Count Helper Functions ==========
|
||||
|
||||
|
||||
@@ -1,22 +1,51 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* Rules Routes Module
|
||||
* Handles all Rules-related API endpoints
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { readFileSync, existsSync, readdirSync, unlinkSync, promises as fsPromises } from 'fs';
|
||||
import { join } from 'path';
|
||||
import { homedir } from 'os';
|
||||
import { executeCliTool } from '../../tools/cli-executor.js';
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
interface ParsedRuleFrontmatter {
|
||||
paths: string[];
|
||||
content: string;
|
||||
}
|
||||
|
||||
interface RuleDetail {
|
||||
name: string;
|
||||
paths: string[];
|
||||
content: string;
|
||||
location: string;
|
||||
path: string;
|
||||
subdirectory: string | null;
|
||||
}
|
||||
|
||||
interface RuleConfigResult {
|
||||
projectRules: RuleDetail[];
|
||||
userRules: RuleDetail[];
|
||||
}
|
||||
|
||||
interface RuleCreateParams {
|
||||
fileName: string;
|
||||
content: string;
|
||||
paths: string[];
|
||||
location: string;
|
||||
subdirectory: string;
|
||||
projectPath: string;
|
||||
}
|
||||
|
||||
interface RuleGenerateParams {
|
||||
generationType: string;
|
||||
description?: string;
|
||||
templateType?: string;
|
||||
extractScope?: string;
|
||||
extractFocus?: string;
|
||||
fileName: string;
|
||||
location: string;
|
||||
subdirectory: string;
|
||||
projectPath: string;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -24,8 +53,8 @@ export interface RouteContext {
|
||||
* @param {string} content
|
||||
* @returns {Object}
|
||||
*/
|
||||
function parseRuleFrontmatter(content) {
|
||||
const result = {
|
||||
function parseRuleFrontmatter(content: string): ParsedRuleFrontmatter {
|
||||
const result: ParsedRuleFrontmatter = {
|
||||
paths: [],
|
||||
content: content
|
||||
};
|
||||
@@ -64,8 +93,8 @@ function parseRuleFrontmatter(content) {
|
||||
* @param {string} subdirectory
|
||||
* @returns {Object[]}
|
||||
*/
|
||||
function scanRulesDirectory(dirPath, location, subdirectory) {
|
||||
const rules = [];
|
||||
function scanRulesDirectory(dirPath: string, location: string, subdirectory: string): RuleDetail[] {
|
||||
const rules: RuleDetail[] = [];
|
||||
|
||||
try {
|
||||
const entries = readdirSync(dirPath, { withFileTypes: true });
|
||||
@@ -102,8 +131,8 @@ function scanRulesDirectory(dirPath, location, subdirectory) {
|
||||
* @param {string} projectPath
|
||||
* @returns {Object}
|
||||
*/
|
||||
function getRulesConfig(projectPath) {
|
||||
const result = {
|
||||
function getRulesConfig(projectPath: string): RuleConfigResult {
|
||||
const result: RuleConfigResult = {
|
||||
projectRules: [],
|
||||
userRules: []
|
||||
};
|
||||
@@ -135,7 +164,7 @@ function getRulesConfig(projectPath) {
|
||||
* @param {string} ruleName
|
||||
* @returns {string|null}
|
||||
*/
|
||||
function findRuleFile(baseDir, ruleName) {
|
||||
function findRuleFile(baseDir: string, ruleName: string): string | null {
|
||||
try {
|
||||
// Direct path
|
||||
const directPath = join(baseDir, ruleName);
|
||||
@@ -164,7 +193,7 @@ function findRuleFile(baseDir, ruleName) {
|
||||
* @param {string} projectPath
|
||||
* @returns {Object}
|
||||
*/
|
||||
function getRuleDetail(ruleName, location, projectPath) {
|
||||
function getRuleDetail(ruleName: string, location: string, projectPath: string): { rule?: RuleDetail; error?: string } {
|
||||
try {
|
||||
const baseDir = location === 'project'
|
||||
? join(projectPath, '.claude', 'rules')
|
||||
@@ -180,17 +209,26 @@ function getRuleDetail(ruleName, location, projectPath) {
|
||||
const content = readFileSync(rulePath, 'utf8');
|
||||
const parsed = parseRuleFrontmatter(content);
|
||||
|
||||
const normalizedBaseDir = baseDir.replace(/\\/g, '/').replace(/\/+$/, '');
|
||||
const normalizedRulePath = rulePath.replace(/\\/g, '/');
|
||||
const relativePath = normalizedRulePath.startsWith(`${normalizedBaseDir}/`)
|
||||
? normalizedRulePath.slice(normalizedBaseDir.length + 1)
|
||||
: ruleName;
|
||||
const relativeParts = relativePath.split('/');
|
||||
const subdirectory = relativeParts.length > 1 ? relativeParts.slice(0, -1).join('/') : null;
|
||||
|
||||
return {
|
||||
rule: {
|
||||
name: ruleName,
|
||||
paths: parsed.paths,
|
||||
content: parsed.content,
|
||||
location,
|
||||
path: rulePath
|
||||
path: rulePath,
|
||||
subdirectory
|
||||
}
|
||||
};
|
||||
} catch (error) {
|
||||
return { error: (error as Error).message };
|
||||
return { error: error instanceof Error ? error.message : String(error) };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -201,7 +239,11 @@ function getRuleDetail(ruleName, location, projectPath) {
|
||||
* @param {string} projectPath
|
||||
* @returns {Object}
|
||||
*/
|
||||
function deleteRule(ruleName, location, projectPath) {
|
||||
function deleteRule(
|
||||
ruleName: string,
|
||||
location: string,
|
||||
projectPath: string
|
||||
): { success: true; ruleName: string; location: string } | { error: string; status?: number } {
|
||||
try {
|
||||
const baseDir = location === 'project'
|
||||
? join(projectPath, '.claude', 'rules')
|
||||
@@ -217,7 +259,7 @@ function deleteRule(ruleName, location, projectPath) {
|
||||
|
||||
return { success: true, ruleName, location };
|
||||
} catch (error) {
|
||||
return { error: (error as Error).message };
|
||||
return { error: error instanceof Error ? error.message : String(error) };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -540,7 +582,7 @@ RULES: $(cat ~/.claude/workflows/cli-templates/prompts/universal/00-universal-ri
|
||||
* @param {boolean} params.enableReview - Optional: enable secondary review
|
||||
* @returns {Object}
|
||||
*/
|
||||
async function generateRuleViaCLI(params) {
|
||||
async function generateRuleViaCLI(params: RuleGenerateParams): Promise<Record<string, unknown>> {
|
||||
try {
|
||||
const {
|
||||
generationType,
|
||||
@@ -682,8 +724,8 @@ FILE NAME: ${fileName}`;
|
||||
executionId: result.conversation?.id,
|
||||
review: reviewResult
|
||||
};
|
||||
} catch (error) {
|
||||
return { error: (error as Error).message };
|
||||
} catch (error: unknown) {
|
||||
return { error: error instanceof Error ? error.message : String(error) };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -698,7 +740,7 @@ FILE NAME: ${fileName}`;
|
||||
* @param {string} params.projectPath - Project root path
|
||||
* @returns {Object}
|
||||
*/
|
||||
async function createRule(params) {
|
||||
async function createRule(params: RuleCreateParams): Promise<Record<string, unknown>> {
|
||||
try {
|
||||
const { fileName, content, paths, location, subdirectory, projectPath } = params;
|
||||
|
||||
@@ -749,8 +791,8 @@ paths: [${paths.join(', ')}]
|
||||
path: filePath,
|
||||
subdirectory: subdirectory || null
|
||||
};
|
||||
} catch (error) {
|
||||
return { error: (error as Error).message };
|
||||
} catch (error: unknown) {
|
||||
return { error: error instanceof Error ? error.message : String(error) };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -790,8 +832,11 @@ export async function handleRulesRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
if (pathname.startsWith('/api/rules/') && req.method === 'DELETE') {
|
||||
const ruleName = decodeURIComponent(pathname.replace('/api/rules/', ''));
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { location, projectPath: projectPathParam } = body;
|
||||
return deleteRule(ruleName, location, projectPathParam || initialPath);
|
||||
const { location, projectPath: projectPathParam } = body as { location?: unknown; projectPath?: unknown };
|
||||
const resolvedLocation = typeof location === 'string' && location.trim().length > 0 ? location : 'project';
|
||||
const resolvedProjectPath =
|
||||
typeof projectPathParam === 'string' && projectPathParam.trim().length > 0 ? projectPathParam : initialPath;
|
||||
return deleteRule(ruleName, resolvedLocation, resolvedProjectPath);
|
||||
});
|
||||
return true;
|
||||
}
|
||||
@@ -807,63 +852,89 @@ export async function handleRulesRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
location,
|
||||
subdirectory,
|
||||
projectPath: projectPathParam,
|
||||
// CLI generation parameters
|
||||
generationType,
|
||||
description,
|
||||
templateType,
|
||||
extractScope,
|
||||
extractFocus
|
||||
} = body;
|
||||
} = body as {
|
||||
mode?: unknown;
|
||||
fileName?: unknown;
|
||||
content?: unknown;
|
||||
paths?: unknown;
|
||||
location?: unknown;
|
||||
subdirectory?: unknown;
|
||||
projectPath?: unknown;
|
||||
generationType?: unknown;
|
||||
description?: unknown;
|
||||
templateType?: unknown;
|
||||
extractScope?: unknown;
|
||||
extractFocus?: unknown;
|
||||
};
|
||||
|
||||
if (!fileName) {
|
||||
const resolvedMode = typeof mode === 'string' ? mode : '';
|
||||
const resolvedFileName = typeof fileName === 'string' ? fileName : '';
|
||||
const resolvedContent = typeof content === 'string' ? content : '';
|
||||
const resolvedLocation = typeof location === 'string' && location.trim().length > 0 ? location : '';
|
||||
const resolvedSubdirectory = typeof subdirectory === 'string' ? subdirectory : '';
|
||||
const resolvedProjectPath =
|
||||
typeof projectPathParam === 'string' && projectPathParam.trim().length > 0 ? projectPathParam : initialPath;
|
||||
const resolvedGenerationType = typeof generationType === 'string' ? generationType : '';
|
||||
const resolvedDescription = typeof description === 'string' ? description : undefined;
|
||||
const resolvedTemplateType = typeof templateType === 'string' ? templateType : undefined;
|
||||
const resolvedExtractScope = typeof extractScope === 'string' ? extractScope : undefined;
|
||||
const resolvedExtractFocus = typeof extractFocus === 'string' ? extractFocus : undefined;
|
||||
const resolvedPaths = Array.isArray(paths) ? paths.filter((p): p is string => typeof p === 'string') : [];
|
||||
|
||||
if (!resolvedFileName) {
|
||||
return { error: 'File name is required' };
|
||||
}
|
||||
|
||||
if (!location) {
|
||||
if (!resolvedLocation) {
|
||||
return { error: 'Location is required (project or user)' };
|
||||
}
|
||||
|
||||
const projectPath = projectPathParam || initialPath;
|
||||
const projectPath = resolvedProjectPath;
|
||||
|
||||
// CLI generation mode
|
||||
if (mode === 'cli-generate') {
|
||||
if (!generationType) {
|
||||
if (resolvedMode === 'cli-generate') {
|
||||
if (!resolvedGenerationType) {
|
||||
return { error: 'generationType is required for CLI generation mode' };
|
||||
}
|
||||
|
||||
// Validate based on generation type
|
||||
if (generationType === 'description' && !description) {
|
||||
if (resolvedGenerationType === 'description' && !resolvedDescription) {
|
||||
return { error: 'description is required for description-based generation' };
|
||||
}
|
||||
|
||||
if (generationType === 'template' && !templateType) {
|
||||
if (resolvedGenerationType === 'template' && !resolvedTemplateType) {
|
||||
return { error: 'templateType is required for template-based generation' };
|
||||
}
|
||||
|
||||
return await generateRuleViaCLI({
|
||||
generationType,
|
||||
description,
|
||||
templateType,
|
||||
extractScope,
|
||||
extractFocus,
|
||||
fileName,
|
||||
location,
|
||||
subdirectory: subdirectory || '',
|
||||
generationType: resolvedGenerationType,
|
||||
description: resolvedDescription,
|
||||
templateType: resolvedTemplateType,
|
||||
extractScope: resolvedExtractScope,
|
||||
extractFocus: resolvedExtractFocus,
|
||||
fileName: resolvedFileName,
|
||||
location: resolvedLocation,
|
||||
subdirectory: resolvedSubdirectory || '',
|
||||
projectPath
|
||||
});
|
||||
}
|
||||
|
||||
// Manual creation mode
|
||||
if (!content) {
|
||||
if (!resolvedContent) {
|
||||
return { error: 'Content is required for manual creation' };
|
||||
}
|
||||
|
||||
return await createRule({
|
||||
fileName,
|
||||
content,
|
||||
paths: paths || [],
|
||||
location,
|
||||
subdirectory: subdirectory || '',
|
||||
fileName: resolvedFileName,
|
||||
content: resolvedContent,
|
||||
paths: resolvedPaths,
|
||||
location: resolvedLocation,
|
||||
subdirectory: resolvedSubdirectory || '',
|
||||
projectPath
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,21 +1,10 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* Session Routes Module
|
||||
* Handles all Session/Task-related API endpoints
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { readFileSync, writeFileSync, existsSync, readdirSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
}
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
/**
|
||||
* Get session detail data (context, summaries, impl-plan, review)
|
||||
@@ -23,8 +12,8 @@ export interface RouteContext {
|
||||
* @param {string} dataType - Type of data to load ('all', 'context', 'tasks', 'summary', 'plan', 'explorations', 'conflict', 'impl-plan', 'review')
|
||||
* @returns {Promise<Object>}
|
||||
*/
|
||||
async function getSessionDetailData(sessionPath, dataType) {
|
||||
const result = {};
|
||||
async function getSessionDetailData(sessionPath: string, dataType: string): Promise<Record<string, unknown>> {
|
||||
const result: any = {};
|
||||
|
||||
// Normalize path
|
||||
const normalizedPath = sessionPath.replace(/\\/g, '/');
|
||||
@@ -66,7 +55,7 @@ async function getSessionDetailData(sessionPath, dataType) {
|
||||
}
|
||||
}
|
||||
// Sort by task ID
|
||||
result.tasks.sort((a, b) => a.task_id.localeCompare(b.task_id));
|
||||
result.tasks.sort((a: { task_id: string }, b: { task_id: string }) => a.task_id.localeCompare(b.task_id));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -341,7 +330,7 @@ async function getSessionDetailData(sessionPath, dataType) {
|
||||
* @param {string} newStatus - New status (pending, in_progress, completed)
|
||||
* @returns {Promise<Object>}
|
||||
*/
|
||||
async function updateTaskStatus(sessionPath, taskId, newStatus) {
|
||||
async function updateTaskStatus(sessionPath: string, taskId: string, newStatus: string): Promise<Record<string, unknown>> {
|
||||
// Normalize path (handle both forward and back slashes)
|
||||
let normalizedPath = sessionPath.replace(/\\/g, '/');
|
||||
|
||||
@@ -429,9 +418,17 @@ export async function handleSessionRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Update task status
|
||||
if (pathname === '/api/update-task-status' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { sessionPath, taskId, newStatus } = body;
|
||||
if (typeof body !== 'object' || body === null) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
if (!sessionPath || !taskId || !newStatus) {
|
||||
const { sessionPath, taskId, newStatus } = body as {
|
||||
sessionPath?: unknown;
|
||||
taskId?: unknown;
|
||||
newStatus?: unknown;
|
||||
};
|
||||
|
||||
if (typeof sessionPath !== 'string' || typeof taskId !== 'string' || typeof newStatus !== 'string') {
|
||||
return { error: 'sessionPath, taskId, and newStatus are required', status: 400 };
|
||||
}
|
||||
|
||||
@@ -443,19 +440,28 @@ export async function handleSessionRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Bulk update task status
|
||||
if (pathname === '/api/bulk-update-task-status' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { sessionPath, taskIds, newStatus } = body;
|
||||
if (typeof body !== 'object' || body === null) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
if (!sessionPath || !taskIds || !newStatus) {
|
||||
const { sessionPath, taskIds, newStatus } = body as {
|
||||
sessionPath?: unknown;
|
||||
taskIds?: unknown;
|
||||
newStatus?: unknown;
|
||||
};
|
||||
|
||||
if (typeof sessionPath !== 'string' || !Array.isArray(taskIds) || typeof newStatus !== 'string') {
|
||||
return { error: 'sessionPath, taskIds, and newStatus are required', status: 400 };
|
||||
}
|
||||
|
||||
const results = [];
|
||||
const results: Array<Record<string, unknown>> = [];
|
||||
for (const taskId of taskIds) {
|
||||
if (typeof taskId !== 'string') continue;
|
||||
try {
|
||||
const result = await updateTaskStatus(sessionPath, taskId, newStatus);
|
||||
results.push(result);
|
||||
} catch (err) {
|
||||
results.push({ taskId, error: err.message });
|
||||
results.push({ taskId, error: err instanceof Error ? err.message : String(err) });
|
||||
}
|
||||
}
|
||||
return { success: true, results };
|
||||
|
||||
@@ -1,22 +1,64 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* Skills Routes Module
|
||||
* Handles all Skills-related API endpoints
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { readFileSync, existsSync, readdirSync, statSync, unlinkSync, promises as fsPromises } from 'fs';
|
||||
import { join } from 'path';
|
||||
import { homedir } from 'os';
|
||||
import { executeCliTool } from '../../tools/cli-executor.js';
|
||||
import { validatePath as validateAllowedPath } from '../../utils/path-validator.js';
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
type SkillLocation = 'project' | 'user';
|
||||
|
||||
interface ParsedSkillFrontmatter {
|
||||
name: string;
|
||||
description: string;
|
||||
version: string | null;
|
||||
allowedTools: string[];
|
||||
content: string;
|
||||
}
|
||||
|
||||
interface SkillSummary {
|
||||
name: string;
|
||||
folderName: string;
|
||||
description: string;
|
||||
version: string | null;
|
||||
allowedTools: string[];
|
||||
location: SkillLocation;
|
||||
path: string;
|
||||
supportingFiles: string[];
|
||||
}
|
||||
|
||||
interface SkillsConfig {
|
||||
projectSkills: SkillSummary[];
|
||||
userSkills: SkillSummary[];
|
||||
}
|
||||
|
||||
interface SkillInfo {
|
||||
name: string;
|
||||
description: string;
|
||||
version: string | null;
|
||||
allowedTools: string[];
|
||||
supportingFiles: string[];
|
||||
}
|
||||
|
||||
type SkillFolderValidation =
|
||||
| { valid: true; errors: string[]; skillInfo: SkillInfo }
|
||||
| { valid: false; errors: string[]; skillInfo: null };
|
||||
|
||||
type GenerationType = 'description' | 'template';
|
||||
|
||||
interface GenerationParams {
|
||||
generationType: GenerationType;
|
||||
description?: string;
|
||||
skillName: string;
|
||||
location: SkillLocation;
|
||||
projectPath: string;
|
||||
}
|
||||
|
||||
function isRecord(value: unknown): value is Record<string, unknown> {
|
||||
return typeof value === 'object' && value !== null;
|
||||
}
|
||||
|
||||
// ========== Skills Helper Functions ==========
|
||||
@@ -26,8 +68,8 @@ export interface RouteContext {
|
||||
* @param {string} content - Skill file content
|
||||
* @returns {Object} Parsed frontmatter and content
|
||||
*/
|
||||
function parseSkillFrontmatter(content) {
|
||||
const result = {
|
||||
function parseSkillFrontmatter(content: string): ParsedSkillFrontmatter {
|
||||
const result: ParsedSkillFrontmatter = {
|
||||
name: '',
|
||||
description: '',
|
||||
version: null,
|
||||
@@ -58,7 +100,11 @@ function parseSkillFrontmatter(content) {
|
||||
result.version = value.replace(/^["']|["']$/g, '');
|
||||
} else if (key === 'allowed-tools' || key === 'allowedtools') {
|
||||
// Parse as comma-separated or YAML array
|
||||
result.allowedTools = value.replace(/^\[|\]$/g, '').split(',').map(t => t.trim()).filter(Boolean);
|
||||
result.allowedTools = value
|
||||
.replace(/^\[|\]$/g, '')
|
||||
.split(',')
|
||||
.map((tool) => tool.trim())
|
||||
.filter(Boolean);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -75,8 +121,8 @@ function parseSkillFrontmatter(content) {
|
||||
* @param {string} skillDir
|
||||
* @returns {string[]}
|
||||
*/
|
||||
function getSupportingFiles(skillDir) {
|
||||
const files = [];
|
||||
function getSupportingFiles(skillDir: string): string[] {
|
||||
const files: string[] = [];
|
||||
try {
|
||||
const entries = readdirSync(skillDir, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
@@ -99,8 +145,8 @@ function getSupportingFiles(skillDir) {
|
||||
* @param {string} projectPath
|
||||
* @returns {Object}
|
||||
*/
|
||||
function getSkillsConfig(projectPath) {
|
||||
const result = {
|
||||
function getSkillsConfig(projectPath: string): SkillsConfig {
|
||||
const result: SkillsConfig = {
|
||||
projectSkills: [],
|
||||
userSkills: []
|
||||
};
|
||||
@@ -179,17 +225,44 @@ function getSkillsConfig(projectPath) {
|
||||
* @param {string} projectPath
|
||||
* @returns {Object}
|
||||
*/
|
||||
function getSkillDetail(skillName, location, projectPath) {
|
||||
async function getSkillDetail(skillName: string, location: SkillLocation, projectPath: string, initialPath: string) {
|
||||
try {
|
||||
const baseDir = location === 'project'
|
||||
? join(projectPath, '.claude', 'skills')
|
||||
: join(homedir(), '.claude', 'skills');
|
||||
if (skillName.includes('/') || skillName.includes('\\')) {
|
||||
return { error: 'Access denied', status: 403 };
|
||||
}
|
||||
if (skillName.includes('..')) {
|
||||
return { error: 'Invalid skill name', status: 400 };
|
||||
}
|
||||
|
||||
let baseDir;
|
||||
if (location === 'project') {
|
||||
try {
|
||||
const validatedProjectPath = await validateAllowedPath(projectPath, { mustExist: true, allowedDirectories: [initialPath] });
|
||||
baseDir = join(validatedProjectPath, '.claude', 'skills');
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Project path validation failed: ${message}`);
|
||||
return { error: status === 403 ? 'Access denied' : 'Invalid path', status };
|
||||
}
|
||||
} else {
|
||||
baseDir = join(homedir(), '.claude', 'skills');
|
||||
}
|
||||
|
||||
const skillDir = join(baseDir, skillName);
|
||||
const skillMdPath = join(skillDir, 'SKILL.md');
|
||||
const skillMdCandidate = join(skillDir, 'SKILL.md');
|
||||
|
||||
if (!existsSync(skillMdPath)) {
|
||||
return { error: 'Skill not found' };
|
||||
let skillMdPath;
|
||||
try {
|
||||
skillMdPath = await validateAllowedPath(skillMdCandidate, { mustExist: true, allowedDirectories: [skillDir] });
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
if (message.includes('File not found')) {
|
||||
return { error: 'Skill not found', status: 404 };
|
||||
}
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Path validation failed: ${message}`);
|
||||
return { error: status === 403 ? 'Access denied' : 'Invalid path', status };
|
||||
}
|
||||
|
||||
const content = readFileSync(skillMdPath, 'utf8');
|
||||
@@ -210,7 +283,7 @@ function getSkillDetail(skillName, location, projectPath) {
|
||||
}
|
||||
};
|
||||
} catch (error) {
|
||||
return { error: (error as Error).message };
|
||||
return { error: (error as Error).message, status: 500 };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -221,38 +294,50 @@ function getSkillDetail(skillName, location, projectPath) {
|
||||
* @param {string} projectPath
|
||||
* @returns {Object}
|
||||
*/
|
||||
function deleteSkill(skillName, location, projectPath) {
|
||||
async function deleteSkill(skillName: string, location: SkillLocation, projectPath: string, initialPath: string) {
|
||||
try {
|
||||
const baseDir = location === 'project'
|
||||
? join(projectPath, '.claude', 'skills')
|
||||
: join(homedir(), '.claude', 'skills');
|
||||
|
||||
const skillDir = join(baseDir, skillName);
|
||||
|
||||
if (!existsSync(skillDir)) {
|
||||
return { error: 'Skill not found' };
|
||||
if (skillName.includes('/') || skillName.includes('\\')) {
|
||||
return { error: 'Access denied', status: 403 };
|
||||
}
|
||||
if (skillName.includes('..')) {
|
||||
return { error: 'Invalid skill name', status: 400 };
|
||||
}
|
||||
|
||||
// Recursively delete directory
|
||||
const deleteRecursive = (dirPath) => {
|
||||
if (existsSync(dirPath)) {
|
||||
readdirSync(dirPath).forEach((file) => {
|
||||
const curPath = join(dirPath, file);
|
||||
if (statSync(curPath).isDirectory()) {
|
||||
deleteRecursive(curPath);
|
||||
} else {
|
||||
unlinkSync(curPath);
|
||||
}
|
||||
});
|
||||
fsPromises.rmdir(dirPath);
|
||||
let baseDir;
|
||||
if (location === 'project') {
|
||||
try {
|
||||
const validatedProjectPath = await validateAllowedPath(projectPath, { mustExist: true, allowedDirectories: [initialPath] });
|
||||
baseDir = join(validatedProjectPath, '.claude', 'skills');
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Project path validation failed: ${message}`);
|
||||
return { error: status === 403 ? 'Access denied' : 'Invalid path', status };
|
||||
}
|
||||
};
|
||||
} else {
|
||||
baseDir = join(homedir(), '.claude', 'skills');
|
||||
}
|
||||
|
||||
deleteRecursive(skillDir);
|
||||
const skillDirCandidate = join(baseDir, skillName);
|
||||
|
||||
let skillDir;
|
||||
try {
|
||||
skillDir = await validateAllowedPath(skillDirCandidate, { mustExist: true, allowedDirectories: [baseDir] });
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
if (message.includes('File not found')) {
|
||||
return { error: 'Skill not found', status: 404 };
|
||||
}
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Path validation failed: ${message}`);
|
||||
return { error: status === 403 ? 'Access denied' : 'Invalid path', status };
|
||||
}
|
||||
|
||||
await fsPromises.rm(skillDir, { recursive: true, force: true });
|
||||
|
||||
return { success: true, skillName, location };
|
||||
} catch (error) {
|
||||
return { error: (error as Error).message };
|
||||
return { error: (error as Error).message, status: 500 };
|
||||
}
|
||||
}
|
||||
|
||||
@@ -261,8 +346,8 @@ function deleteSkill(skillName, location, projectPath) {
|
||||
* @param {string} folderPath - Path to skill folder
|
||||
* @returns {Object} Validation result with skill info
|
||||
*/
|
||||
function validateSkillFolder(folderPath) {
|
||||
const errors = [];
|
||||
function validateSkillFolder(folderPath: string): SkillFolderValidation {
|
||||
const errors: string[] = [];
|
||||
|
||||
// Check if folder exists
|
||||
if (!existsSync(folderPath)) {
|
||||
@@ -327,7 +412,7 @@ function validateSkillFolder(folderPath) {
|
||||
* @param {string} source - Source directory path
|
||||
* @param {string} target - Target directory path
|
||||
*/
|
||||
async function copyDirectoryRecursive(source, target) {
|
||||
async function copyDirectoryRecursive(source: string, target: string): Promise<void> {
|
||||
await fsPromises.mkdir(target, { recursive: true });
|
||||
|
||||
const entries = await fsPromises.readdir(source, { withFileTypes: true });
|
||||
@@ -352,7 +437,7 @@ async function copyDirectoryRecursive(source, target) {
|
||||
* @param {string} customName - Optional custom name for skill
|
||||
* @returns {Object}
|
||||
*/
|
||||
async function importSkill(sourcePath, location, projectPath, customName) {
|
||||
async function importSkill(sourcePath: string, location: SkillLocation, projectPath: string, customName?: string) {
|
||||
try {
|
||||
// Validate source folder
|
||||
const validation = validateSkillFolder(sourcePath);
|
||||
@@ -371,6 +456,9 @@ async function importSkill(sourcePath, location, projectPath, customName) {
|
||||
|
||||
// Determine target folder name
|
||||
const skillName = customName || validation.skillInfo.name;
|
||||
if (skillName.includes('/') || skillName.includes('\\') || skillName.includes('..')) {
|
||||
return { error: 'Invalid skill name', status: 400 };
|
||||
}
|
||||
const targetPath = join(baseDir, skillName);
|
||||
|
||||
// Check if already exists
|
||||
@@ -402,7 +490,7 @@ async function importSkill(sourcePath, location, projectPath, customName) {
|
||||
* @param {string} params.projectPath - Project root path
|
||||
* @returns {Object}
|
||||
*/
|
||||
async function generateSkillViaCLI({ generationType, description, skillName, location, projectPath }) {
|
||||
async function generateSkillViaCLI({ generationType, description, skillName, location, projectPath }: GenerationParams) {
|
||||
try {
|
||||
// Validate inputs
|
||||
if (!skillName) {
|
||||
@@ -523,9 +611,19 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Get all skills (project and user)
|
||||
if (pathname === '/api/skills') {
|
||||
const projectPathParam = url.searchParams.get('path') || initialPath;
|
||||
const skillsData = getSkillsConfig(projectPathParam);
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(skillsData));
|
||||
|
||||
try {
|
||||
const validatedProjectPath = await validateAllowedPath(projectPathParam, { mustExist: true, allowedDirectories: [initialPath] });
|
||||
const skillsData = getSkillsConfig(validatedProjectPath);
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(skillsData));
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Project path validation failed: ${message}`);
|
||||
res.writeHead(status, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: status === 403 ? 'Access denied' : 'Invalid path', projectSkills: [], userSkills: [] }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -537,18 +635,46 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
const location = url.searchParams.get('location') || 'project';
|
||||
const projectPathParam = url.searchParams.get('path') || initialPath;
|
||||
|
||||
const baseDir = location === 'project'
|
||||
? join(projectPathParam, '.claude', 'skills')
|
||||
: join(homedir(), '.claude', 'skills');
|
||||
if (skillName.includes('/') || skillName.includes('\\') || skillName.includes('..')) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Invalid skill name' }));
|
||||
return true;
|
||||
}
|
||||
|
||||
const dirPath = subPath
|
||||
? join(baseDir, skillName, subPath)
|
||||
: join(baseDir, skillName);
|
||||
let baseDir: string;
|
||||
if (location === 'project') {
|
||||
try {
|
||||
const validatedProjectPath = await validateAllowedPath(projectPathParam, { mustExist: true, allowedDirectories: [initialPath] });
|
||||
baseDir = join(validatedProjectPath, '.claude', 'skills');
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Project path validation failed: ${message}`);
|
||||
res.writeHead(status, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: status === 403 ? 'Access denied' : 'Invalid path' }));
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
baseDir = join(homedir(), '.claude', 'skills');
|
||||
}
|
||||
|
||||
// Security check: ensure path is within skill folder
|
||||
if (!dirPath.startsWith(join(baseDir, skillName))) {
|
||||
res.writeHead(403, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Access denied' }));
|
||||
const skillRoot = join(baseDir, skillName);
|
||||
const requestedDir = subPath ? join(skillRoot, subPath) : skillRoot;
|
||||
|
||||
let dirPath: string;
|
||||
try {
|
||||
dirPath = await validateAllowedPath(requestedDir, { mustExist: true, allowedDirectories: [skillRoot] });
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
if (message.includes('File not found')) {
|
||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Directory not found' }));
|
||||
return true;
|
||||
}
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Path validation failed: ${message}`);
|
||||
res.writeHead(status, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: status === 403 ? 'Access denied' : 'Invalid path' }));
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -596,16 +722,46 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
return true;
|
||||
}
|
||||
|
||||
const baseDir = location === 'project'
|
||||
? join(projectPathParam, '.claude', 'skills')
|
||||
: join(homedir(), '.claude', 'skills');
|
||||
if (skillName.includes('/') || skillName.includes('\\') || skillName.includes('..')) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Invalid skill name' }));
|
||||
return true;
|
||||
}
|
||||
|
||||
const filePath = join(baseDir, skillName, fileName);
|
||||
let baseDir: string;
|
||||
if (location === 'project') {
|
||||
try {
|
||||
const validatedProjectPath = await validateAllowedPath(projectPathParam, { mustExist: true, allowedDirectories: [initialPath] });
|
||||
baseDir = join(validatedProjectPath, '.claude', 'skills');
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Project path validation failed: ${message}`);
|
||||
res.writeHead(status, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: status === 403 ? 'Access denied' : 'Invalid path' }));
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
baseDir = join(homedir(), '.claude', 'skills');
|
||||
}
|
||||
|
||||
// Security check: ensure file is within skill folder
|
||||
if (!filePath.startsWith(join(baseDir, skillName))) {
|
||||
res.writeHead(403, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Access denied' }));
|
||||
const skillRoot = join(baseDir, skillName);
|
||||
const requestedFile = join(skillRoot, fileName);
|
||||
|
||||
let filePath: string;
|
||||
try {
|
||||
filePath = await validateAllowedPath(requestedFile, { mustExist: true, allowedDirectories: [skillRoot] });
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
if (message.includes('File not found')) {
|
||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'File not found' }));
|
||||
return true;
|
||||
}
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Path validation failed: ${message}`);
|
||||
res.writeHead(status, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: status === 403 ? 'Access denied' : 'Invalid path' }));
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -632,25 +788,54 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
const skillName = decodeURIComponent(pathParts[3]);
|
||||
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { fileName, content, location, projectPath: projectPathParam } = body;
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
if (!fileName) {
|
||||
const fileName = body.fileName;
|
||||
const content = body.content;
|
||||
const location: SkillLocation = body.location === 'project' ? 'project' : 'user';
|
||||
const projectPathParam = typeof body.projectPath === 'string' ? body.projectPath : undefined;
|
||||
|
||||
if (typeof fileName !== 'string' || !fileName) {
|
||||
return { error: 'fileName is required' };
|
||||
}
|
||||
|
||||
if (content === undefined) {
|
||||
if (typeof content !== 'string') {
|
||||
return { error: 'content is required' };
|
||||
}
|
||||
|
||||
const baseDir = location === 'project'
|
||||
? join(projectPathParam || initialPath, '.claude', 'skills')
|
||||
: join(homedir(), '.claude', 'skills');
|
||||
if (skillName.includes('/') || skillName.includes('\\') || skillName.includes('..')) {
|
||||
return { error: 'Invalid skill name', status: 400 };
|
||||
}
|
||||
|
||||
const filePath = join(baseDir, skillName, fileName);
|
||||
let baseDir: string;
|
||||
if (location === 'project') {
|
||||
try {
|
||||
const projectRoot = projectPathParam || initialPath;
|
||||
const validatedProjectPath = await validateAllowedPath(projectRoot, { mustExist: true, allowedDirectories: [initialPath] });
|
||||
baseDir = join(validatedProjectPath, '.claude', 'skills');
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Project path validation failed: ${message}`);
|
||||
return { error: status === 403 ? 'Access denied' : 'Invalid path', status };
|
||||
}
|
||||
} else {
|
||||
baseDir = join(homedir(), '.claude', 'skills');
|
||||
}
|
||||
|
||||
// Security check: ensure file is within skill folder
|
||||
if (!filePath.startsWith(join(baseDir, skillName))) {
|
||||
return { error: 'Access denied' };
|
||||
const skillRoot = join(baseDir, skillName);
|
||||
const requestedFile = join(skillRoot, fileName);
|
||||
|
||||
let filePath: string;
|
||||
try {
|
||||
filePath = await validateAllowedPath(requestedFile, { allowedDirectories: [skillRoot] });
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Path validation failed: ${message}`);
|
||||
return { error: status === 403 ? 'Access denied' : 'Invalid path', status };
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -667,25 +852,43 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
if (pathname.startsWith('/api/skills/') && req.method === 'GET' &&
|
||||
!pathname.endsWith('/skills/') && !pathname.endsWith('/dir') && !pathname.endsWith('/file')) {
|
||||
const skillName = decodeURIComponent(pathname.replace('/api/skills/', ''));
|
||||
const location = url.searchParams.get('location') || 'project';
|
||||
const locationParam = url.searchParams.get('location');
|
||||
const location: SkillLocation = locationParam === 'user' ? 'user' : 'project';
|
||||
const projectPathParam = url.searchParams.get('path') || initialPath;
|
||||
const skillDetail = getSkillDetail(skillName, location, projectPathParam);
|
||||
const skillDetail = await getSkillDetail(skillName, location, projectPathParam, initialPath);
|
||||
if (skillDetail.error) {
|
||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(skillDetail));
|
||||
} else {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(skillDetail));
|
||||
res.writeHead(skillDetail.status || 404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: skillDetail.error }));
|
||||
return true;
|
||||
}
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(skillDetail));
|
||||
return true;
|
||||
}
|
||||
|
||||
// API: Delete skill
|
||||
if (pathname.startsWith('/api/skills/') && req.method === 'DELETE') {
|
||||
const skillName = decodeURIComponent(pathname.replace('/api/skills/', ''));
|
||||
if (skillName.includes('/') || skillName.includes('\\')) {
|
||||
res.writeHead(403, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Access denied' }));
|
||||
return true;
|
||||
}
|
||||
if (skillName.includes('..')) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Invalid skill name' }));
|
||||
return true;
|
||||
}
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { location, projectPath: projectPathParam } = body;
|
||||
return deleteSkill(skillName, location, projectPathParam || initialPath);
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
const location: SkillLocation = body.location === 'project' ? 'project' : 'user';
|
||||
const projectPathParam = typeof body.projectPath === 'string' ? body.projectPath : undefined;
|
||||
|
||||
return deleteSkill(skillName, location, projectPathParam || initialPath, initialPath);
|
||||
});
|
||||
return true;
|
||||
}
|
||||
@@ -693,11 +896,24 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Validate skill import
|
||||
if (pathname === '/api/skills/validate-import' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { sourcePath } = body;
|
||||
if (!sourcePath) {
|
||||
if (!isRecord(body)) {
|
||||
return { valid: false, errors: ['Source path is required'], skillInfo: null };
|
||||
}
|
||||
return validateSkillFolder(sourcePath);
|
||||
|
||||
const sourcePath = body.sourcePath;
|
||||
if (typeof sourcePath !== 'string' || !sourcePath.trim()) {
|
||||
return { valid: false, errors: ['Source path is required'], skillInfo: null };
|
||||
}
|
||||
|
||||
try {
|
||||
const validatedSourcePath = await validateAllowedPath(sourcePath, { mustExist: true });
|
||||
return validateSkillFolder(validatedSourcePath);
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Path validation failed: ${message}`);
|
||||
return { error: status === 403 ? 'Access denied' : 'Invalid path', status };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
@@ -705,37 +921,77 @@ export async function handleSkillsRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
// API: Create/Import skill
|
||||
if (pathname === '/api/skills/create' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { mode, location, sourcePath, skillName, description, generationType, projectPath: projectPathParam } = body;
|
||||
if (!isRecord(body)) {
|
||||
return { error: 'Invalid request body', status: 400 };
|
||||
}
|
||||
|
||||
if (!mode) {
|
||||
const mode = body.mode;
|
||||
const locationValue = body.location;
|
||||
const sourcePath = typeof body.sourcePath === 'string' ? body.sourcePath : undefined;
|
||||
const skillName = typeof body.skillName === 'string' ? body.skillName : undefined;
|
||||
const description = typeof body.description === 'string' ? body.description : undefined;
|
||||
const generationType = typeof body.generationType === 'string' ? body.generationType : undefined;
|
||||
const projectPathParam = typeof body.projectPath === 'string' ? body.projectPath : undefined;
|
||||
|
||||
if (typeof mode !== 'string' || !mode) {
|
||||
return { error: 'Mode is required (import or cli-generate)' };
|
||||
}
|
||||
|
||||
if (!location) {
|
||||
if (locationValue !== 'project' && locationValue !== 'user') {
|
||||
return { error: 'Location is required (project or user)' };
|
||||
}
|
||||
|
||||
const location: SkillLocation = locationValue;
|
||||
const projectPath = projectPathParam || initialPath;
|
||||
|
||||
let validatedProjectPath = projectPath;
|
||||
if (location === 'project') {
|
||||
try {
|
||||
validatedProjectPath = await validateAllowedPath(projectPath, { mustExist: true, allowedDirectories: [initialPath] });
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Project path validation failed: ${message}`);
|
||||
return { error: status === 403 ? 'Access denied' : 'Invalid path', status };
|
||||
}
|
||||
}
|
||||
|
||||
if (mode === 'import') {
|
||||
// Import mode: copy existing skill folder
|
||||
if (!sourcePath) {
|
||||
return { error: 'Source path is required for import mode' };
|
||||
}
|
||||
|
||||
return await importSkill(sourcePath, location, projectPath, skillName);
|
||||
if (skillName && (skillName.includes('/') || skillName.includes('\\') || skillName.includes('..'))) {
|
||||
return { error: 'Invalid skill name', status: 400 };
|
||||
}
|
||||
|
||||
let validatedSourcePath;
|
||||
try {
|
||||
validatedSourcePath = await validateAllowedPath(sourcePath, { mustExist: true });
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
const status = message.includes('Access denied') ? 403 : 400;
|
||||
console.error(`[Skills] Path validation failed: ${message}`);
|
||||
return { error: status === 403 ? 'Access denied' : 'Invalid path', status };
|
||||
}
|
||||
|
||||
return await importSkill(validatedSourcePath, location, validatedProjectPath, skillName);
|
||||
} else if (mode === 'cli-generate') {
|
||||
// CLI generate mode: use Claude to generate skill
|
||||
if (!skillName) {
|
||||
return { error: 'Skill name is required for CLI generation mode' };
|
||||
}
|
||||
if (skillName.includes('/') || skillName.includes('\\') || skillName.includes('..')) {
|
||||
return { error: 'Invalid skill name', status: 400 };
|
||||
}
|
||||
|
||||
return await generateSkillViaCLI({
|
||||
generationType: generationType || 'description',
|
||||
generationType: generationType === 'template' ? 'template' : 'description',
|
||||
description,
|
||||
skillName,
|
||||
location,
|
||||
projectPath
|
||||
projectPath: validatedProjectPath
|
||||
});
|
||||
} else {
|
||||
return { error: 'Invalid mode. Must be "import" or "cli-generate"' };
|
||||
|
||||
@@ -1,14 +1,13 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* Status Routes Module
|
||||
* Aggregated status endpoint for faster dashboard loading
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import { existsSync } from 'fs';
|
||||
import { join } from 'path';
|
||||
import { homedir } from 'os';
|
||||
import { getCliToolsStatus } from '../../tools/cli-executor.js';
|
||||
import { checkVenvStatus, checkSemanticStatus } from '../../tools/codex-lens.js';
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
/**
|
||||
* Check CCW installation status
|
||||
@@ -54,16 +53,6 @@ function checkCcwInstallStatus(): {
|
||||
};
|
||||
}
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle status routes
|
||||
* @returns true if route was handled, false otherwise
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
// @ts-nocheck
|
||||
/**
|
||||
* System Routes Module
|
||||
* Handles all system-related API endpoints
|
||||
*/
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
import type { Server } from 'http';
|
||||
import { readFileSync, existsSync, promises as fsPromises } from 'fs';
|
||||
import { join } from 'path';
|
||||
@@ -17,17 +15,11 @@ import {
|
||||
cleanAllStorage,
|
||||
resolveProjectId,
|
||||
projectExists,
|
||||
formatBytes
|
||||
} from '../../tools/storage-manager.js';
|
||||
formatBytes
|
||||
} from '../../tools/storage-manager.js';
|
||||
import type { RouteContext } from './types.js';
|
||||
|
||||
export interface RouteContext {
|
||||
pathname: string;
|
||||
url: URL;
|
||||
req: IncomingMessage;
|
||||
res: ServerResponse;
|
||||
initialPath: string;
|
||||
handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
interface SystemRouteContext extends RouteContext {
|
||||
server: Server;
|
||||
}
|
||||
|
||||
@@ -39,7 +31,7 @@ export interface RouteContext {
|
||||
const NPM_PACKAGE_NAME = 'claude-code-workflow';
|
||||
|
||||
// Cache for version check (avoid too frequent requests)
|
||||
let versionCheckCache = null;
|
||||
let versionCheckCache: Record<string, unknown> | null = null;
|
||||
let versionCheckTime = 0;
|
||||
const VERSION_CHECK_CACHE_TTL = 3600000; // 1 hour
|
||||
|
||||
@@ -83,7 +75,7 @@ function compareVersions(v1: string, v2: string): number {
|
||||
* Check npm registry for latest version
|
||||
* @returns {Promise<Object>}
|
||||
*/
|
||||
async function checkNpmVersion(): Promise<any> {
|
||||
async function checkNpmVersion(): Promise<Record<string, unknown>> {
|
||||
// Return cached result if still valid
|
||||
const now = Date.now();
|
||||
if (versionCheckCache && (now - versionCheckTime) < VERSION_CHECK_CACHE_TTL) {
|
||||
@@ -103,8 +95,8 @@ async function checkNpmVersion(): Promise<any> {
|
||||
throw new Error('HTTP ' + response.status);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
const latestVersion = data.version;
|
||||
const data = await response.json() as { version?: unknown };
|
||||
const latestVersion = typeof data.version === 'string' ? data.version : currentVersion;
|
||||
|
||||
// Compare versions
|
||||
const hasUpdate = compareVersions(latestVersion, currentVersion) > 0;
|
||||
@@ -174,10 +166,11 @@ async function getWorkflowData(projectPath: string): Promise<any> {
|
||||
const sessions = await scanSessions(workflowDir);
|
||||
const data = await aggregateData(sessions, workflowDir);
|
||||
|
||||
data.projectPath = normalizePathForDisplay(resolvedPath);
|
||||
data.recentPaths = getRecentPaths();
|
||||
|
||||
return data;
|
||||
return {
|
||||
...data,
|
||||
projectPath: normalizePathForDisplay(resolvedPath),
|
||||
recentPaths: getRecentPaths()
|
||||
};
|
||||
}
|
||||
|
||||
// ========================================
|
||||
@@ -188,7 +181,7 @@ async function getWorkflowData(projectPath: string): Promise<any> {
|
||||
* Handle System routes
|
||||
* @returns true if route was handled, false otherwise
|
||||
*/
|
||||
export async function handleSystemRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
export async function handleSystemRoutes(ctx: SystemRouteContext): Promise<boolean> {
|
||||
const { pathname, url, req, res, initialPath, handlePostRequest, broadcastToClients, server } = ctx;
|
||||
|
||||
// API: Get workflow data for a path
|
||||
|
||||
25
ccw/src/core/routes/types.ts
Normal file
25
ccw/src/core/routes/types.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
import type { IncomingMessage, ServerResponse } from 'http';
|
||||
|
||||
export type PostRequestHandler = (body: unknown) => Promise<unknown>;
|
||||
|
||||
export interface RouteContext {
|
||||
/** URL pathname (e.g. `/api/status`). */
|
||||
pathname: string;
|
||||
/** Parsed request URL. */
|
||||
url: URL;
|
||||
/** Incoming HTTP request. */
|
||||
req: IncomingMessage;
|
||||
/** HTTP response to write to. */
|
||||
res: ServerResponse;
|
||||
/** Initial path configured for the server (used for dashboard routes). */
|
||||
initialPath: string;
|
||||
/** Helper that parses JSON body and passes it to `handler`. */
|
||||
handlePostRequest: (
|
||||
req: IncomingMessage,
|
||||
res: ServerResponse,
|
||||
handler: PostRequestHandler
|
||||
) => void;
|
||||
/** Broadcast payload to connected dashboard clients. */
|
||||
broadcastToClients: (data: unknown) => void;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
// @ts-nocheck
|
||||
import http from 'http';
|
||||
import { URL } from 'url';
|
||||
import { readFileSync, existsSync } from 'fs';
|
||||
@@ -27,11 +26,20 @@ import { handleHelpRoutes } from './routes/help-routes.js';
|
||||
import { handleLiteLLMRoutes } from './routes/litellm-routes.js';
|
||||
import { handleLiteLLMApiRoutes } from './routes/litellm-api-routes.js';
|
||||
import { handleNavStatusRoutes } from './routes/nav-status-routes.js';
|
||||
import { handleAuthRoutes } from './routes/auth-routes.js';
|
||||
|
||||
// Import WebSocket handling
|
||||
import { handleWebSocketUpgrade, broadcastToClients } from './websocket.js';
|
||||
import { handleWebSocketUpgrade, broadcastToClients, extractSessionIdFromPath } from './websocket.js';
|
||||
|
||||
import { getTokenManager } from './auth/token-manager.js';
|
||||
import { authMiddleware, isLocalhostRequest, setAuthCookie } from './auth/middleware.js';
|
||||
import { getCorsOrigin } from './cors.js';
|
||||
import { csrfValidation } from './auth/csrf-middleware.js';
|
||||
import { getCsrfTokenManager } from './auth/csrf-manager.js';
|
||||
import { randomBytes } from 'crypto';
|
||||
|
||||
import type { ServerConfig } from '../types/config.js';
|
||||
import type { PostRequestHandler } from './routes/types.js';
|
||||
|
||||
interface ServerOptions {
|
||||
port?: number;
|
||||
@@ -40,13 +48,7 @@ interface ServerOptions {
|
||||
open?: boolean;
|
||||
}
|
||||
|
||||
interface PostResult {
|
||||
error?: string;
|
||||
status?: number;
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
type PostHandler = (body: unknown) => Promise<PostResult>;
|
||||
type PostHandler = PostRequestHandler;
|
||||
|
||||
// Template paths
|
||||
const TEMPLATE_PATH = join(import.meta.dirname, '../../src/templates/dashboard.html');
|
||||
@@ -158,28 +160,131 @@ const MODULE_FILES = [
|
||||
* Handle POST request with JSON body
|
||||
*/
|
||||
function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: PostHandler): void {
|
||||
let body = '';
|
||||
req.on('data', chunk => { body += chunk; });
|
||||
req.on('end', async () => {
|
||||
const cachedParsed = (req as any).body;
|
||||
const cachedRawBody = (req as any).__ccwRawBody;
|
||||
|
||||
const handleBody = async (parsed: unknown) => {
|
||||
try {
|
||||
const parsed = JSON.parse(body);
|
||||
const result = await handler(parsed);
|
||||
|
||||
if (result.error) {
|
||||
const status = result.status || 500;
|
||||
const isObjectResult = typeof result === 'object' && result !== null;
|
||||
const errorValue = isObjectResult && 'error' in result ? (result as { error?: unknown }).error : undefined;
|
||||
const statusValue = isObjectResult && 'status' in result ? (result as { status?: unknown }).status : undefined;
|
||||
|
||||
if (typeof errorValue === 'string' && errorValue.length > 0) {
|
||||
const status = typeof statusValue === 'number' ? statusValue : 500;
|
||||
res.writeHead(status, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: result.error }));
|
||||
} else {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
res.end(JSON.stringify({ error: errorValue }));
|
||||
return;
|
||||
}
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: (error as Error).message }));
|
||||
res.end(JSON.stringify({ error: message }));
|
||||
}
|
||||
};
|
||||
|
||||
if (cachedParsed !== undefined) {
|
||||
void handleBody(cachedParsed);
|
||||
return;
|
||||
}
|
||||
|
||||
if (typeof cachedRawBody === 'string') {
|
||||
try {
|
||||
void handleBody(JSON.parse(cachedRawBody));
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: message }));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
let body = '';
|
||||
req.on('data', (chunk: Buffer) => { body += chunk.toString(); });
|
||||
req.on('end', async () => {
|
||||
try {
|
||||
(req as any).__ccwRawBody = body;
|
||||
const parsed = JSON.parse(body);
|
||||
(req as any).body = parsed;
|
||||
await handleBody(parsed);
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: message }));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function getHeaderValue(header: string | string[] | undefined): string | null {
|
||||
if (!header) return null;
|
||||
if (Array.isArray(header)) return header[0] ?? null;
|
||||
return header;
|
||||
}
|
||||
|
||||
function parseCookieHeader(cookieHeader: string | null | undefined): Record<string, string> {
|
||||
if (!cookieHeader) return {};
|
||||
|
||||
const cookies: Record<string, string> = {};
|
||||
for (const part of cookieHeader.split(';')) {
|
||||
const [rawName, ...rawValueParts] = part.trim().split('=');
|
||||
if (!rawName) continue;
|
||||
const rawValue = rawValueParts.join('=');
|
||||
try {
|
||||
cookies[rawName] = decodeURIComponent(rawValue);
|
||||
} catch {
|
||||
cookies[rawName] = rawValue;
|
||||
}
|
||||
}
|
||||
return cookies;
|
||||
}
|
||||
|
||||
function appendSetCookie(res: http.ServerResponse, cookie: string): void {
|
||||
const existing = res.getHeader('Set-Cookie');
|
||||
if (!existing) {
|
||||
res.setHeader('Set-Cookie', cookie);
|
||||
return;
|
||||
}
|
||||
|
||||
if (Array.isArray(existing)) {
|
||||
res.setHeader('Set-Cookie', [...existing, cookie]);
|
||||
return;
|
||||
}
|
||||
|
||||
res.setHeader('Set-Cookie', [String(existing), cookie]);
|
||||
}
|
||||
|
||||
function getOrCreateSessionId(req: http.IncomingMessage, res: http.ServerResponse): string {
|
||||
const cookies = parseCookieHeader(getHeaderValue(req.headers.cookie));
|
||||
const existing = cookies.ccw_session_id;
|
||||
if (existing) return existing;
|
||||
|
||||
const created = randomBytes(16).toString('hex');
|
||||
const attributes = [
|
||||
`ccw_session_id=${encodeURIComponent(created)}`,
|
||||
'Path=/',
|
||||
'HttpOnly',
|
||||
'SameSite=Strict',
|
||||
`Max-Age=${24 * 60 * 60}`,
|
||||
];
|
||||
appendSetCookie(res, attributes.join('; '));
|
||||
return created;
|
||||
}
|
||||
|
||||
function setCsrfCookie(res: http.ServerResponse, token: string, maxAgeSeconds: number): void {
|
||||
const attributes = [
|
||||
`XSRF-TOKEN=${encodeURIComponent(token)}`,
|
||||
'Path=/',
|
||||
'HttpOnly',
|
||||
'SameSite=Strict',
|
||||
`Max-Age=${maxAgeSeconds}`,
|
||||
];
|
||||
appendSetCookie(res, attributes.join('; '));
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate dashboard HTML with embedded CSS and JS
|
||||
*/
|
||||
@@ -244,17 +349,27 @@ window.INITIAL_PATH = '${normalizePathForDisplay(initialPath).replace(/\\/g, '/'
|
||||
* @returns {Promise<http.Server>}
|
||||
*/
|
||||
export async function startServer(options: ServerOptions = {}): Promise<http.Server> {
|
||||
const port = options.port ?? 3456;
|
||||
let serverPort = options.port ?? 3456;
|
||||
const initialPath = options.initialPath || process.cwd();
|
||||
const host = options.host ?? '127.0.0.1';
|
||||
|
||||
const tokenManager = getTokenManager();
|
||||
const secretKey = tokenManager.getSecretKey();
|
||||
tokenManager.getOrCreateAuthToken();
|
||||
const unauthenticatedPaths = new Set<string>(['/api/auth/token', '/api/csrf-token']);
|
||||
|
||||
const server = http.createServer(async (req, res) => {
|
||||
const url = new URL(req.url, `http://localhost:${port}`);
|
||||
const url = new URL(req.url ?? '/', `http://localhost:${serverPort}`);
|
||||
const pathname = url.pathname;
|
||||
|
||||
// CORS headers for API requests
|
||||
res.setHeader('Access-Control-Allow-Origin', '*');
|
||||
const originHeader = Array.isArray(req.headers.origin) ? req.headers.origin[0] : req.headers.origin;
|
||||
res.setHeader('Access-Control-Allow-Origin', getCorsOrigin(originHeader, serverPort));
|
||||
res.setHeader('Access-Control-Allow-Methods', 'GET, POST, PUT, PATCH, DELETE, OPTIONS');
|
||||
res.setHeader('Access-Control-Allow-Headers', 'Content-Type');
|
||||
res.setHeader('Access-Control-Allow-Headers', 'Content-Type, Authorization, X-CSRF-Token');
|
||||
res.setHeader('Access-Control-Allow-Credentials', 'true');
|
||||
res.setHeader('Access-Control-Expose-Headers', 'X-CSRF-Token');
|
||||
res.setHeader('Vary', 'Origin');
|
||||
|
||||
if (req.method === 'OPTIONS') {
|
||||
res.writeHead(200);
|
||||
@@ -277,12 +392,43 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
|
||||
initialPath,
|
||||
handlePostRequest,
|
||||
broadcastToClients,
|
||||
extractSessionIdFromPath,
|
||||
server
|
||||
};
|
||||
|
||||
// Token acquisition endpoint (localhost-only)
|
||||
if (pathname === '/api/auth/token') {
|
||||
if (!isLocalhostRequest(req)) {
|
||||
res.writeHead(403, { 'Content-Type': 'application/json; charset=utf-8' });
|
||||
res.end(JSON.stringify({ error: 'Forbidden' }));
|
||||
return;
|
||||
}
|
||||
|
||||
const tokenResult = tokenManager.getOrCreateAuthToken();
|
||||
setAuthCookie(res, tokenResult.token, tokenResult.expiresAt);
|
||||
res.writeHead(200, { 'Content-Type': 'application/json; charset=utf-8' });
|
||||
res.end(JSON.stringify({ token: tokenResult.token, expiresAt: tokenResult.expiresAt.toISOString() }));
|
||||
return;
|
||||
}
|
||||
|
||||
// Authentication middleware for all API routes
|
||||
if (pathname.startsWith('/api/')) {
|
||||
const ok = authMiddleware({ pathname, req, res, tokenManager, secretKey, unauthenticatedPaths });
|
||||
if (!ok) return;
|
||||
}
|
||||
|
||||
// CSRF validation middleware for state-changing API routes
|
||||
if (pathname.startsWith('/api/')) {
|
||||
const ok = await csrfValidation({ pathname, req, res });
|
||||
if (!ok) return;
|
||||
}
|
||||
|
||||
// Try each route handler in order
|
||||
// Order matters: more specific routes should come before general ones
|
||||
|
||||
// Auth routes (/api/csrf-token)
|
||||
if (await handleAuthRoutes(routeContext)) return;
|
||||
|
||||
// Status routes (/api/status/*) - Aggregated endpoint for faster loading
|
||||
if (pathname.startsWith('/api/status/')) {
|
||||
if (await handleStatusRoutes(routeContext)) return;
|
||||
@@ -401,6 +547,15 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
|
||||
|
||||
// Serve dashboard HTML
|
||||
if (pathname === '/' || pathname === '/index.html') {
|
||||
if (isLocalhostRequest(req)) {
|
||||
const tokenResult = tokenManager.getOrCreateAuthToken();
|
||||
setAuthCookie(res, tokenResult.token, tokenResult.expiresAt);
|
||||
|
||||
const sessionId = getOrCreateSessionId(req, res);
|
||||
const csrfToken = getCsrfTokenManager().generateToken(sessionId);
|
||||
res.setHeader('X-CSRF-Token', csrfToken);
|
||||
setCsrfCookie(res, csrfToken, 15 * 60);
|
||||
}
|
||||
const html = generateServerDashboard(initialPath);
|
||||
res.writeHead(200, { 'Content-Type': 'text/html; charset=utf-8' });
|
||||
res.end(html);
|
||||
@@ -418,8 +573,8 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
|
||||
if (pathname.startsWith('/assets/')) {
|
||||
const assetPath = join(ASSETS_DIR, pathname.replace('/assets/', ''));
|
||||
if (existsSync(assetPath)) {
|
||||
const ext = assetPath.split('.').pop().toLowerCase();
|
||||
const mimeTypes = {
|
||||
const ext = assetPath.split('.').pop()?.toLowerCase();
|
||||
const mimeTypes: Record<string, string> = {
|
||||
'js': 'application/javascript',
|
||||
'css': 'text/css',
|
||||
'json': 'application/json',
|
||||
@@ -431,7 +586,7 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
|
||||
'woff2': 'font/woff2',
|
||||
'ttf': 'font/ttf'
|
||||
};
|
||||
const contentType = mimeTypes[ext] || 'application/octet-stream';
|
||||
const contentType = ext ? mimeTypes[ext] ?? 'application/octet-stream' : 'application/octet-stream';
|
||||
const content = readFileSync(assetPath);
|
||||
res.writeHead(200, {
|
||||
'Content-Type': contentType,
|
||||
@@ -448,8 +603,9 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
|
||||
|
||||
} catch (error: unknown) {
|
||||
console.error('Server error:', error);
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: (error as Error).message }));
|
||||
res.end(JSON.stringify({ error: message }));
|
||||
}
|
||||
});
|
||||
|
||||
@@ -463,10 +619,15 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
|
||||
});
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
server.listen(port, () => {
|
||||
console.log(`Dashboard server running at http://localhost:${port}`);
|
||||
console.log(`WebSocket endpoint available at ws://localhost:${port}/ws`);
|
||||
console.log(`Hook endpoint available at POST http://localhost:${port}/api/hook`);
|
||||
server.listen(serverPort, host, () => {
|
||||
const addr = server.address();
|
||||
if (addr && typeof addr === 'object') {
|
||||
serverPort = addr.port;
|
||||
}
|
||||
|
||||
console.log(`Dashboard server running at http://${host}:${serverPort}`);
|
||||
console.log(`WebSocket endpoint available at ws://${host}:${serverPort}/ws`);
|
||||
console.log(`Hook endpoint available at POST http://${host}:${serverPort}/api/hook`);
|
||||
resolve(server);
|
||||
});
|
||||
server.on('error', reject);
|
||||
|
||||
@@ -1,11 +1,17 @@
|
||||
// @ts-nocheck
|
||||
import { createHash } from 'crypto';
|
||||
import type { IncomingMessage } from 'http';
|
||||
import type { Duplex } from 'stream';
|
||||
|
||||
// WebSocket clients for real-time notifications
|
||||
export const wsClients = new Set();
|
||||
export const wsClients = new Set<Duplex>();
|
||||
|
||||
export function handleWebSocketUpgrade(req, socket, head) {
|
||||
const key = req.headers['sec-websocket-key'];
|
||||
export function handleWebSocketUpgrade(req: IncomingMessage, socket: Duplex, _head: Buffer): void {
|
||||
const header = req.headers['sec-websocket-key'];
|
||||
const key = Array.isArray(header) ? header[0] : header;
|
||||
if (!key) {
|
||||
socket.end();
|
||||
return;
|
||||
}
|
||||
const acceptKey = createHash('sha1')
|
||||
.update(key + '258EAFA5-E914-47DA-95CA-C5AB0DC85B11')
|
||||
.digest('base64');
|
||||
@@ -26,7 +32,7 @@ export function handleWebSocketUpgrade(req, socket, head) {
|
||||
console.log(`[WS] Client connected (${wsClients.size} total)`);
|
||||
|
||||
// Handle incoming messages
|
||||
socket.on('data', (buffer) => {
|
||||
socket.on('data', (buffer: Buffer) => {
|
||||
try {
|
||||
const frame = parseWebSocketFrame(buffer);
|
||||
if (!frame) return;
|
||||
@@ -74,7 +80,7 @@ export function handleWebSocketUpgrade(req, socket, head) {
|
||||
* Parse WebSocket frame (simplified)
|
||||
* Returns { opcode, payload } or null
|
||||
*/
|
||||
export function parseWebSocketFrame(buffer) {
|
||||
export function parseWebSocketFrame(buffer: Buffer): { opcode: number; payload: string } | null {
|
||||
if (buffer.length < 2) return null;
|
||||
|
||||
const firstByte = buffer[0];
|
||||
@@ -97,7 +103,7 @@ export function parseWebSocketFrame(buffer) {
|
||||
offset = 10;
|
||||
}
|
||||
|
||||
let mask = null;
|
||||
let mask: Buffer | null = null;
|
||||
if (isMasked) {
|
||||
mask = buffer.slice(offset, offset + 4);
|
||||
offset += 4;
|
||||
@@ -117,7 +123,7 @@ export function parseWebSocketFrame(buffer) {
|
||||
/**
|
||||
* Create WebSocket frame
|
||||
*/
|
||||
export function createWebSocketFrame(data) {
|
||||
export function createWebSocketFrame(data: unknown): Buffer {
|
||||
const payload = Buffer.from(JSON.stringify(data), 'utf8');
|
||||
const length = payload.length;
|
||||
|
||||
@@ -147,7 +153,7 @@ export function createWebSocketFrame(data) {
|
||||
/**
|
||||
* Broadcast message to all connected WebSocket clients
|
||||
*/
|
||||
export function broadcastToClients(data) {
|
||||
export function broadcastToClients(data: unknown): void {
|
||||
const frame = createWebSocketFrame(data);
|
||||
|
||||
for (const client of wsClients) {
|
||||
@@ -158,13 +164,15 @@ export function broadcastToClients(data) {
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`[WS] Broadcast to ${wsClients.size} clients:`, data.type);
|
||||
const eventType =
|
||||
typeof data === 'object' && data !== null && 'type' in data ? (data as { type?: unknown }).type : undefined;
|
||||
console.log(`[WS] Broadcast to ${wsClients.size} clients:`, eventType);
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract session ID from file path
|
||||
*/
|
||||
export function extractSessionIdFromPath(filePath) {
|
||||
export function extractSessionIdFromPath(filePath: string): string | null {
|
||||
// Normalize path
|
||||
const normalized = filePath.replace(/\\/g, '/');
|
||||
|
||||
|
||||
29
ccw/src/tools/README.md
Normal file
29
ccw/src/tools/README.md
Normal file
@@ -0,0 +1,29 @@
|
||||
# Tools
|
||||
|
||||
This directory contains CCW “tools”: self-contained modules that implement concrete functionality (executors, integrations, etc.) that higher-level CLI and route layers call into.
|
||||
|
||||
## CLI Executor
|
||||
|
||||
The CLI executor is split into focused modules to keep responsibilities clear and keep the public API stable via re-exports.
|
||||
|
||||
**Entry point**
|
||||
- `ccw/src/tools/cli-executor.ts` – thin facade that re-exports from `cli-executor-core.ts` (stable import path for callers).
|
||||
|
||||
**Modules**
|
||||
- `ccw/src/tools/cli-executor-core.ts` – orchestrates tool execution, resume/merge logic, and conversation persistence wiring.
|
||||
- `ccw/src/tools/cli-executor-utils.ts` – debug logging, tool availability checks (with cache), command building.
|
||||
- `ccw/src/tools/cli-executor-state.ts` – conversation/history types + SQLite-backed storage helpers.
|
||||
- `ccw/src/tools/cli-prompt-builder.ts` – prompt concatenation helpers (plain/YAML/JSON) and merged-conversation prompt formatting.
|
||||
|
||||
**Dependency flow (high level)**
|
||||
```
|
||||
cli-executor.ts
|
||||
-> cli-executor-core.ts
|
||||
-> cli-executor-utils.ts
|
||||
-> cli-executor-state.ts
|
||||
-> cli-prompt-builder.ts
|
||||
```
|
||||
|
||||
**Public API**
|
||||
- Prefer importing from `ccw/src/tools/cli-executor.ts`.
|
||||
- `cli-executor-core.ts` re-exports prompt helpers/types from `cli-prompt-builder.ts` to preserve existing imports (`PromptConcatenator`, `buildPrompt`, `PromptFormat`, etc.).
|
||||
1074
ccw/src/tools/cli-executor-core.ts
Normal file
1074
ccw/src/tools/cli-executor-core.ts
Normal file
File diff suppressed because it is too large
Load Diff
553
ccw/src/tools/cli-executor-state.ts
Normal file
553
ccw/src/tools/cli-executor-state.ts
Normal file
@@ -0,0 +1,553 @@
|
||||
/**
|
||||
* CLI Executor State
|
||||
* Conversation history + execution record storage (SQLite-backed)
|
||||
*/
|
||||
|
||||
import type { HistoryIndexEntry } from './cli-history-store.js';
|
||||
import { StoragePaths, ensureStorageDir } from '../config/storage-paths.js';
|
||||
|
||||
// Lazy-loaded SQLite store module
|
||||
let sqliteStoreModule: typeof import('./cli-history-store.js') | null = null;
|
||||
|
||||
/**
|
||||
* Get or initialize SQLite store (async)
|
||||
*/
|
||||
export async function getSqliteStore(baseDir: string) {
|
||||
if (!sqliteStoreModule) {
|
||||
sqliteStoreModule = await import('./cli-history-store.js');
|
||||
}
|
||||
return sqliteStoreModule.getHistoryStore(baseDir);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get SQLite store (sync - uses cached module)
|
||||
*/
|
||||
function getSqliteStoreSync(baseDir: string) {
|
||||
if (!sqliteStoreModule) {
|
||||
throw new Error('SQLite store not initialized. Call an async function first.');
|
||||
}
|
||||
return sqliteStoreModule.getHistoryStore(baseDir);
|
||||
}
|
||||
|
||||
// Execution category types
|
||||
export type ExecutionCategory = 'user' | 'internal' | 'insight';
|
||||
|
||||
// Single turn in a conversation
|
||||
export interface ConversationTurn {
|
||||
turn: number;
|
||||
timestamp: string;
|
||||
prompt: string;
|
||||
duration_ms: number;
|
||||
status: 'success' | 'error' | 'timeout';
|
||||
exit_code: number | null;
|
||||
output: {
|
||||
stdout: string;
|
||||
stderr: string;
|
||||
truncated: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
// Multi-turn conversation record
|
||||
export interface ConversationRecord {
|
||||
id: string;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
tool: string;
|
||||
model: string;
|
||||
mode: string;
|
||||
category: ExecutionCategory; // user | internal | insight
|
||||
total_duration_ms: number;
|
||||
turn_count: number;
|
||||
latest_status: 'success' | 'error' | 'timeout';
|
||||
turns: ConversationTurn[];
|
||||
parent_execution_id?: string; // For fork/retry scenarios
|
||||
}
|
||||
|
||||
// Legacy single execution record (for backward compatibility)
|
||||
export interface ExecutionRecord {
|
||||
id: string;
|
||||
timestamp: string;
|
||||
tool: string;
|
||||
model: string;
|
||||
mode: string;
|
||||
prompt: string;
|
||||
status: 'success' | 'error' | 'timeout';
|
||||
exit_code: number | null;
|
||||
duration_ms: number;
|
||||
output: {
|
||||
stdout: string;
|
||||
stderr: string;
|
||||
truncated: boolean;
|
||||
};
|
||||
}
|
||||
|
||||
interface HistoryIndex {
|
||||
version: number;
|
||||
total_executions: number;
|
||||
executions: {
|
||||
id: string;
|
||||
timestamp: string; // created_at for conversations
|
||||
updated_at?: string; // last update time
|
||||
tool: string;
|
||||
status: string;
|
||||
duration_ms: number;
|
||||
turn_count?: number; // number of turns in conversation
|
||||
prompt_preview: string;
|
||||
}[];
|
||||
}
|
||||
|
||||
export interface ExecutionOutput {
|
||||
success: boolean;
|
||||
execution: ExecutionRecord;
|
||||
conversation: ConversationRecord; // Full conversation record
|
||||
stdout: string;
|
||||
stderr: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure history directory exists (uses centralized storage)
|
||||
*/
|
||||
export function ensureHistoryDir(baseDir: string): string {
|
||||
const paths = StoragePaths.project(baseDir);
|
||||
ensureStorageDir(paths.cliHistory);
|
||||
return paths.cliHistory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Save conversation to SQLite
|
||||
* @param baseDir - Project base directory (NOT historyDir)
|
||||
*/
|
||||
async function saveConversationAsync(baseDir: string, conversation: ConversationRecord): Promise<void> {
|
||||
const store = await getSqliteStore(baseDir);
|
||||
store.saveConversation(conversation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync wrapper for saveConversation (uses cached SQLite module)
|
||||
* @param baseDir - Project base directory (NOT historyDir)
|
||||
*/
|
||||
export function saveConversation(baseDir: string, conversation: ConversationRecord): void {
|
||||
try {
|
||||
const store = getSqliteStoreSync(baseDir);
|
||||
store.saveConversation(conversation);
|
||||
} catch {
|
||||
// If sync not available, queue for async save
|
||||
saveConversationAsync(baseDir, conversation).catch(err => {
|
||||
console.error('[CLI Executor] Failed to save conversation:', err.message);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load existing conversation by ID from SQLite
|
||||
* @param baseDir - Project base directory (NOT historyDir)
|
||||
*/
|
||||
async function loadConversationAsync(baseDir: string, conversationId: string): Promise<ConversationRecord | null> {
|
||||
const store = await getSqliteStore(baseDir);
|
||||
return store.getConversation(conversationId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync wrapper for loadConversation (uses cached SQLite module)
|
||||
* @param baseDir - Project base directory (NOT historyDir)
|
||||
*/
|
||||
export function loadConversation(baseDir: string, conversationId: string): ConversationRecord | null {
|
||||
try {
|
||||
const store = getSqliteStoreSync(baseDir);
|
||||
return store.getConversation(conversationId);
|
||||
} catch {
|
||||
// SQLite not initialized yet, return null
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert legacy ExecutionRecord to ConversationRecord
|
||||
*/
|
||||
export function convertToConversation(record: ExecutionRecord): ConversationRecord {
|
||||
return {
|
||||
id: record.id,
|
||||
created_at: record.timestamp,
|
||||
updated_at: record.timestamp,
|
||||
tool: record.tool,
|
||||
model: record.model,
|
||||
mode: record.mode,
|
||||
category: 'user', // Legacy records default to user category
|
||||
total_duration_ms: record.duration_ms,
|
||||
turn_count: 1,
|
||||
latest_status: record.status,
|
||||
turns: [{
|
||||
turn: 1,
|
||||
timestamp: record.timestamp,
|
||||
prompt: record.prompt,
|
||||
duration_ms: record.duration_ms,
|
||||
status: record.status,
|
||||
exit_code: record.exit_code,
|
||||
output: record.output
|
||||
}]
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get execution history from SQLite (centralized storage)
|
||||
*/
|
||||
export async function getExecutionHistoryAsync(baseDir: string, options: {
|
||||
limit?: number;
|
||||
tool?: string | null;
|
||||
status?: string | null;
|
||||
category?: ExecutionCategory | null;
|
||||
search?: string | null;
|
||||
recursive?: boolean;
|
||||
} = {}): Promise<{
|
||||
total: number;
|
||||
count: number;
|
||||
executions: (HistoryIndex['executions'][0] & { sourceDir?: string })[];
|
||||
}> {
|
||||
const { limit = 50, tool = null, status = null, category = null, search = null, recursive = false } = options;
|
||||
|
||||
// Recursive mode: aggregate data from parent and all child projects
|
||||
if (recursive) {
|
||||
const { scanChildProjectsAsync } = await import('../config/storage-paths.js');
|
||||
const childProjects = await scanChildProjectsAsync(baseDir);
|
||||
|
||||
let allExecutions: (HistoryIndex['executions'][0] & { sourceDir?: string })[] = [];
|
||||
let totalCount = 0;
|
||||
|
||||
// Query parent project - apply limit at source to reduce memory footprint
|
||||
try {
|
||||
const parentStore = await getSqliteStore(baseDir);
|
||||
const parentResult = parentStore.getHistory({ limit, tool, status, category, search });
|
||||
totalCount += parentResult.total;
|
||||
|
||||
for (const exec of parentResult.executions) {
|
||||
allExecutions.push({ ...exec, sourceDir: baseDir });
|
||||
}
|
||||
} catch (error) {
|
||||
if (process.env.DEBUG) {
|
||||
console.error(`[CLI History] Failed to query parent project ${baseDir}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
// Query all child projects - apply limit to each child
|
||||
for (const child of childProjects) {
|
||||
try {
|
||||
const childStore = await getSqliteStore(child.projectPath);
|
||||
const childResult = childStore.getHistory({ limit, tool, status, category, search });
|
||||
totalCount += childResult.total;
|
||||
|
||||
for (const exec of childResult.executions) {
|
||||
allExecutions.push({
|
||||
...exec,
|
||||
sourceDir: child.relativePath // Show relative path for clarity
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
if (process.env.DEBUG) {
|
||||
console.error(`[CLI History] Failed to query child project ${child.projectPath}:`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by timestamp (newest first) and apply limit
|
||||
allExecutions.sort((a, b) => Number(b.timestamp) - Number(a.timestamp));
|
||||
const limitedExecutions = allExecutions.slice(0, limit);
|
||||
|
||||
return {
|
||||
total: totalCount,
|
||||
count: limitedExecutions.length,
|
||||
executions: limitedExecutions
|
||||
};
|
||||
}
|
||||
|
||||
// Non-recursive mode: only query current project
|
||||
const store = await getSqliteStore(baseDir);
|
||||
return store.getHistory({ limit, tool, status, category, search });
|
||||
}
|
||||
|
||||
/**
|
||||
* Get execution history (sync version - uses cached SQLite module)
|
||||
*/
|
||||
export function getExecutionHistory(baseDir: string, options: {
|
||||
limit?: number;
|
||||
tool?: string | null;
|
||||
status?: string | null;
|
||||
recursive?: boolean;
|
||||
} = {}): {
|
||||
total: number;
|
||||
count: number;
|
||||
executions: (HistoryIndex['executions'][0] & { sourceDir?: string })[];
|
||||
} {
|
||||
const { limit = 50, tool = null, status = null, recursive = false } = options;
|
||||
|
||||
try {
|
||||
if (recursive) {
|
||||
const { scanChildProjects } = require('../config/storage-paths.js');
|
||||
const childProjects = scanChildProjects(baseDir);
|
||||
|
||||
let allExecutions: (HistoryIndex['executions'][0] & { sourceDir?: string })[] = [];
|
||||
let totalCount = 0;
|
||||
|
||||
// Query parent project - apply limit at source
|
||||
try {
|
||||
const parentStore = getSqliteStoreSync(baseDir);
|
||||
const parentResult = parentStore.getHistory({ limit, tool, status });
|
||||
totalCount += parentResult.total;
|
||||
|
||||
for (const exec of parentResult.executions) {
|
||||
allExecutions.push({ ...exec, sourceDir: baseDir });
|
||||
}
|
||||
} catch (error) {
|
||||
if (process.env.DEBUG) {
|
||||
console.error(`[CLI History] Failed to query parent project ${baseDir}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
// Query all child projects - apply limit to each child
|
||||
for (const child of childProjects) {
|
||||
try {
|
||||
const childStore = getSqliteStoreSync(child.projectPath);
|
||||
const childResult = childStore.getHistory({ limit, tool, status });
|
||||
totalCount += childResult.total;
|
||||
|
||||
for (const exec of childResult.executions) {
|
||||
allExecutions.push({
|
||||
...exec,
|
||||
sourceDir: child.relativePath // Show relative path for clarity
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
if (process.env.DEBUG) {
|
||||
console.error(`[CLI History] Failed to query child project ${child.projectPath}:`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by timestamp (newest first) and apply limit
|
||||
allExecutions.sort((a, b) => Number(b.timestamp) - Number(a.timestamp));
|
||||
const limitedExecutions = allExecutions.slice(0, limit);
|
||||
|
||||
return {
|
||||
total: totalCount,
|
||||
count: limitedExecutions.length,
|
||||
executions: limitedExecutions
|
||||
};
|
||||
}
|
||||
|
||||
const store = getSqliteStoreSync(baseDir);
|
||||
return store.getHistory({ limit, tool, status });
|
||||
} catch {
|
||||
// SQLite not initialized yet, return empty
|
||||
return { total: 0, count: 0, executions: [] };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get conversation detail by ID
|
||||
*/
|
||||
export function getConversationDetail(baseDir: string, conversationId: string): ConversationRecord | null {
|
||||
// Pass baseDir directly - loadConversation will resolve the correct storage path
|
||||
return loadConversation(baseDir, conversationId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get conversation detail with native session mapping info
|
||||
*/
|
||||
export function getConversationDetailWithNativeInfo(baseDir: string, conversationId: string) {
|
||||
try {
|
||||
const store = getSqliteStoreSync(baseDir);
|
||||
return store.getConversationWithNativeInfo(conversationId);
|
||||
} catch {
|
||||
// SQLite not initialized, return null
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get execution detail by ID (legacy, returns ExecutionRecord for backward compatibility)
|
||||
*/
|
||||
export function getExecutionDetail(baseDir: string, executionId: string): ExecutionRecord | null {
|
||||
const conversation = getConversationDetail(baseDir, executionId);
|
||||
if (!conversation) return null;
|
||||
|
||||
// Convert to legacy ExecutionRecord format (using latest turn)
|
||||
const latestTurn = conversation.turns[conversation.turns.length - 1];
|
||||
return {
|
||||
id: conversation.id,
|
||||
timestamp: conversation.created_at,
|
||||
tool: conversation.tool,
|
||||
model: conversation.model,
|
||||
mode: conversation.mode,
|
||||
prompt: latestTurn.prompt,
|
||||
status: conversation.latest_status,
|
||||
exit_code: latestTurn.exit_code,
|
||||
duration_ms: conversation.total_duration_ms,
|
||||
output: latestTurn.output
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete execution by ID (async version)
|
||||
*/
|
||||
export async function deleteExecutionAsync(baseDir: string, executionId: string): Promise<{ success: boolean; error?: string }> {
|
||||
const store = await getSqliteStore(baseDir);
|
||||
return store.deleteConversation(executionId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete execution by ID (sync version - uses cached SQLite module)
|
||||
*/
|
||||
export function deleteExecution(baseDir: string, executionId: string): { success: boolean; error?: string } {
|
||||
try {
|
||||
const store = getSqliteStoreSync(baseDir);
|
||||
return store.deleteConversation(executionId);
|
||||
} catch {
|
||||
return { success: false, error: 'SQLite store not initialized' };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Batch delete executions (async)
|
||||
*/
|
||||
export async function batchDeleteExecutionsAsync(baseDir: string, ids: string[]): Promise<{
|
||||
success: boolean;
|
||||
deleted: number;
|
||||
total: number;
|
||||
errors?: string[];
|
||||
}> {
|
||||
const store = await getSqliteStore(baseDir);
|
||||
const result = store.batchDelete(ids);
|
||||
return { ...result, total: ids.length };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get latest execution for a specific tool
|
||||
*/
|
||||
export function getLatestExecution(baseDir: string, tool?: string): ExecutionRecord | null {
|
||||
const history = getExecutionHistory(baseDir, { limit: 1, tool: tool || null });
|
||||
if (history.executions.length === 0) {
|
||||
return null;
|
||||
}
|
||||
return getExecutionDetail(baseDir, history.executions[0].id);
|
||||
}
|
||||
|
||||
// ========== Native Session Content Functions ==========
|
||||
|
||||
/**
|
||||
* Get native session content by CCW ID
|
||||
* Parses the native session file and returns full conversation data
|
||||
*/
|
||||
export async function getNativeSessionContent(baseDir: string, ccwId: string) {
|
||||
const store = await getSqliteStore(baseDir);
|
||||
return store.getNativeSessionContent(ccwId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get formatted native conversation text
|
||||
*/
|
||||
export async function getFormattedNativeConversation(baseDir: string, ccwId: string, options?: {
|
||||
includeThoughts?: boolean;
|
||||
includeToolCalls?: boolean;
|
||||
includeTokens?: boolean;
|
||||
maxContentLength?: number;
|
||||
}) {
|
||||
const store = await getSqliteStore(baseDir);
|
||||
return store.getFormattedNativeConversation(ccwId, options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get conversation pairs from native session
|
||||
*/
|
||||
export async function getNativeConversationPairs(baseDir: string, ccwId: string) {
|
||||
const store = await getSqliteStore(baseDir);
|
||||
return store.getNativeConversationPairs(ccwId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get enriched conversation (CCW + native session merged)
|
||||
*/
|
||||
export async function getEnrichedConversation(baseDir: string, ccwId: string) {
|
||||
const store = await getSqliteStore(baseDir);
|
||||
return store.getEnrichedConversation(ccwId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get history with native session info
|
||||
* Supports recursive querying of child projects
|
||||
*/
|
||||
export async function getHistoryWithNativeInfo(baseDir: string, options?: {
|
||||
limit?: number;
|
||||
offset?: number;
|
||||
tool?: string | null;
|
||||
status?: string | null;
|
||||
category?: ExecutionCategory | null;
|
||||
search?: string | null;
|
||||
recursive?: boolean;
|
||||
}) {
|
||||
const { limit = 50, recursive = false, ...queryOptions } = options || {};
|
||||
|
||||
// Non-recursive mode: query single project
|
||||
if (!recursive) {
|
||||
const store = await getSqliteStore(baseDir);
|
||||
return store.getHistoryWithNativeInfo({ limit, ...queryOptions });
|
||||
}
|
||||
|
||||
// Recursive mode: aggregate data from parent and all child projects
|
||||
const { scanChildProjectsAsync } = await import('../config/storage-paths.js');
|
||||
const childProjects = await scanChildProjectsAsync(baseDir);
|
||||
|
||||
// Use the same type as store.getHistoryWithNativeInfo returns
|
||||
type ExecutionWithNativeAndSource = HistoryIndexEntry & {
|
||||
hasNativeSession: boolean;
|
||||
nativeSessionId?: string;
|
||||
nativeSessionPath?: string;
|
||||
};
|
||||
|
||||
const allExecutions: ExecutionWithNativeAndSource[] = [];
|
||||
let totalCount = 0;
|
||||
|
||||
// Query parent project
|
||||
try {
|
||||
const parentStore = await getSqliteStore(baseDir);
|
||||
const parentResult = parentStore.getHistoryWithNativeInfo({ limit, ...queryOptions });
|
||||
totalCount += parentResult.total;
|
||||
|
||||
for (const exec of parentResult.executions) {
|
||||
allExecutions.push({ ...exec, sourceDir: baseDir });
|
||||
}
|
||||
} catch (error) {
|
||||
if (process.env.DEBUG) {
|
||||
console.error(`[CLI History] Failed to query parent project ${baseDir}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
// Query all child projects
|
||||
for (const child of childProjects) {
|
||||
try {
|
||||
const childStore = await getSqliteStore(child.projectPath);
|
||||
const childResult = childStore.getHistoryWithNativeInfo({ limit, ...queryOptions });
|
||||
totalCount += childResult.total;
|
||||
|
||||
for (const exec of childResult.executions) {
|
||||
allExecutions.push({ ...exec, sourceDir: child.projectPath });
|
||||
}
|
||||
} catch (error) {
|
||||
if (process.env.DEBUG) {
|
||||
console.error(`[CLI History] Failed to query child project ${child.projectPath}:`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by updated_at descending and apply limit
|
||||
allExecutions.sort((a, b) => {
|
||||
const timeA = a.updated_at ? new Date(a.updated_at).getTime() : new Date(a.timestamp).getTime();
|
||||
const timeB = b.updated_at ? new Date(b.updated_at).getTime() : new Date(b.timestamp).getTime();
|
||||
return timeB - timeA;
|
||||
});
|
||||
const limitedExecutions = allExecutions.slice(0, limit);
|
||||
|
||||
return {
|
||||
total: totalCount,
|
||||
count: limitedExecutions.length,
|
||||
executions: limitedExecutions
|
||||
};
|
||||
}
|
||||
306
ccw/src/tools/cli-executor-utils.ts
Normal file
306
ccw/src/tools/cli-executor-utils.ts
Normal file
@@ -0,0 +1,306 @@
|
||||
import { spawn } from 'child_process';
|
||||
|
||||
// Debug logging utility - check env at runtime for --debug flag support
|
||||
export function isDebugEnabled(): boolean {
|
||||
return process.env.DEBUG === 'true' || process.env.DEBUG === '1' || process.env.CCW_DEBUG === 'true';
|
||||
}
|
||||
|
||||
export function debugLog(category: string, message: string, data?: Record<string, unknown>): void {
|
||||
if (!isDebugEnabled()) return;
|
||||
const timestamp = new Date().toISOString();
|
||||
const prefix = `[${timestamp}] [CLI-DEBUG] [${category}]`;
|
||||
if (data) {
|
||||
console.error(`${prefix} ${message}`, JSON.stringify(data, null, 2));
|
||||
} else {
|
||||
console.error(`${prefix} ${message}`);
|
||||
}
|
||||
}
|
||||
|
||||
export function errorLog(
|
||||
category: string,
|
||||
message: string,
|
||||
error?: Error | unknown,
|
||||
context?: Record<string, unknown>
|
||||
): void {
|
||||
const timestamp = new Date().toISOString();
|
||||
const prefix = `[${timestamp}] [CLI-ERROR] [${category}]`;
|
||||
console.error(`${prefix} ${message}`);
|
||||
if (error instanceof Error) {
|
||||
console.error(`${prefix} Error: ${error.message}`);
|
||||
if (isDebugEnabled() && error.stack) {
|
||||
console.error(`${prefix} Stack: ${error.stack}`);
|
||||
}
|
||||
} else if (error) {
|
||||
console.error(`${prefix} Error: ${String(error)}`);
|
||||
}
|
||||
if (context) {
|
||||
console.error(`${prefix} Context:`, JSON.stringify(context, null, 2));
|
||||
}
|
||||
}
|
||||
|
||||
/** Result of probing the PATH for a CLI tool. */
export interface ToolAvailability {
  available: boolean;   // true when the tool resolved to an executable
  path: string | null;  // first resolved path, or null when not found
}

// Tool availability cache with TTL
/** Cache entry pairing a probe result with the time it was recorded. */
interface CachedToolAvailability {
  result: ToolAvailability;
  timestamp: number;  // epoch millis when the probe completed
}

// Cache storage: Map<toolName, CachedToolAvailability>
const toolAvailabilityCache = new Map<string, CachedToolAvailability>();
const CACHE_TTL_MS = 5 * 60 * 1000; // 5 minutes
|
||||
|
||||
function isCacheValid(cached: CachedToolAvailability): boolean {
|
||||
return Date.now() - cached.timestamp < CACHE_TTL_MS;
|
||||
}
|
||||
|
||||
function clearExpiredCache(): void {
|
||||
const now = Date.now();
|
||||
const entriesToDelete: string[] = [];
|
||||
|
||||
toolAvailabilityCache.forEach((cached, tool) => {
|
||||
if (now - cached.timestamp >= CACHE_TTL_MS) {
|
||||
entriesToDelete.push(tool);
|
||||
}
|
||||
});
|
||||
|
||||
entriesToDelete.forEach((tool) => toolAvailabilityCache.delete(tool));
|
||||
}
|
||||
|
||||
export function clearToolCache(): void {
|
||||
toolAvailabilityCache.clear();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a CLI tool is available (with caching)
|
||||
*/
|
||||
export async function checkToolAvailability(tool: string): Promise<ToolAvailability> {
|
||||
debugLog('TOOL_CHECK', `Checking availability for tool: ${tool}`);
|
||||
|
||||
const cached = toolAvailabilityCache.get(tool);
|
||||
if (cached && isCacheValid(cached)) {
|
||||
debugLog('TOOL_CHECK', `Cache hit for ${tool}`, { available: cached.result.available, path: cached.result.path });
|
||||
return cached.result;
|
||||
}
|
||||
|
||||
clearExpiredCache();
|
||||
|
||||
return new Promise((resolve) => {
|
||||
const isWindows = process.platform === 'win32';
|
||||
const command = isWindows ? 'where' : 'which';
|
||||
|
||||
debugLog('TOOL_CHECK', `Running ${command} ${tool}`, { platform: process.platform });
|
||||
|
||||
const child = spawn(command, [tool], {
|
||||
shell: false,
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
});
|
||||
|
||||
let stdout = '';
|
||||
let stderr = '';
|
||||
child.stdout!.on('data', (data) => {
|
||||
stdout += data.toString();
|
||||
});
|
||||
child.stderr?.on('data', (data) => {
|
||||
stderr += data.toString();
|
||||
});
|
||||
|
||||
child.on('close', (code) => {
|
||||
const result: ToolAvailability = code === 0 && stdout.trim()
|
||||
? { available: true, path: stdout.trim().split('\n')[0] }
|
||||
: { available: false, path: null };
|
||||
|
||||
if (result.available) {
|
||||
debugLog('TOOL_CHECK', `Tool ${tool} found`, { path: result.path });
|
||||
toolAvailabilityCache.set(tool, {
|
||||
result,
|
||||
timestamp: Date.now(),
|
||||
});
|
||||
} else {
|
||||
debugLog('TOOL_CHECK', `Tool ${tool} not found`, { exitCode: code, stderr: stderr.trim() || '(empty)' });
|
||||
}
|
||||
|
||||
resolve(result);
|
||||
});
|
||||
|
||||
child.on('error', (error) => {
|
||||
errorLog('TOOL_CHECK', `Failed to check tool availability: ${tool}`, error, { command, tool });
|
||||
resolve({ available: false, path: null });
|
||||
});
|
||||
|
||||
setTimeout(() => {
|
||||
child.kill();
|
||||
debugLog('TOOL_CHECK', `Timeout checking tool ${tool} (5s)`);
|
||||
resolve({ available: false, path: null });
|
||||
}, 5000);
|
||||
});
|
||||
}
|
||||
|
||||
// Native resume configuration
/** Controls whether a CLI invocation resumes a tool's own session. */
export interface NativeResumeConfig {
  enabled: boolean;   // when false, the other fields are ignored
  sessionId?: string; // Native UUID
  isLatest?: boolean; // Use latest/--last flag
}
|
||||
|
||||
/**
|
||||
* Build command arguments based on tool and options
|
||||
*/
|
||||
export function buildCommand(params: {
|
||||
tool: string;
|
||||
prompt: string;
|
||||
mode: string;
|
||||
model?: string;
|
||||
dir?: string;
|
||||
include?: string;
|
||||
nativeResume?: NativeResumeConfig;
|
||||
}): { command: string; args: string[]; useStdin: boolean } {
|
||||
const { tool, prompt, mode = 'analysis', model, dir, include, nativeResume } = params;
|
||||
|
||||
debugLog('BUILD_CMD', `Building command for tool: ${tool}`, {
|
||||
mode,
|
||||
model: model || '(default)',
|
||||
dir: dir || '(cwd)',
|
||||
include: include || '(none)',
|
||||
nativeResume: nativeResume
|
||||
? { enabled: nativeResume.enabled, isLatest: nativeResume.isLatest, sessionId: nativeResume.sessionId }
|
||||
: '(none)',
|
||||
promptLength: prompt.length,
|
||||
});
|
||||
|
||||
let command = tool;
|
||||
let args: string[] = [];
|
||||
// Default to stdin for all tools to avoid escaping issues on Windows
|
||||
let useStdin = true;
|
||||
|
||||
switch (tool) {
|
||||
case 'gemini':
|
||||
if (nativeResume?.enabled) {
|
||||
if (nativeResume.isLatest) {
|
||||
args.push('-r', 'latest');
|
||||
} else if (nativeResume.sessionId) {
|
||||
args.push('-r', nativeResume.sessionId);
|
||||
}
|
||||
}
|
||||
if (model) {
|
||||
args.push('-m', model);
|
||||
}
|
||||
if (mode === 'write') {
|
||||
args.push('--approval-mode', 'yolo');
|
||||
}
|
||||
if (include) {
|
||||
args.push('--include-directories', include);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'qwen':
|
||||
if (nativeResume?.enabled) {
|
||||
if (nativeResume.isLatest) {
|
||||
args.push('--continue');
|
||||
} else if (nativeResume.sessionId) {
|
||||
args.push('--resume', nativeResume.sessionId);
|
||||
}
|
||||
}
|
||||
if (model) {
|
||||
args.push('-m', model);
|
||||
}
|
||||
if (mode === 'write') {
|
||||
args.push('--approval-mode', 'yolo');
|
||||
}
|
||||
if (include) {
|
||||
args.push('--include-directories', include);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'codex':
|
||||
useStdin = true;
|
||||
if (nativeResume?.enabled) {
|
||||
args.push('resume');
|
||||
if (nativeResume.isLatest) {
|
||||
args.push('--last');
|
||||
} else if (nativeResume.sessionId) {
|
||||
args.push(nativeResume.sessionId);
|
||||
}
|
||||
if (mode === 'write' || mode === 'auto') {
|
||||
args.push('--dangerously-bypass-approvals-and-sandbox');
|
||||
} else {
|
||||
args.push('--full-auto');
|
||||
}
|
||||
if (model) {
|
||||
args.push('-m', model);
|
||||
}
|
||||
if (include) {
|
||||
const dirs = include.split(',').map((d) => d.trim()).filter((d) => d);
|
||||
for (const addDir of dirs) {
|
||||
args.push('--add-dir', addDir);
|
||||
}
|
||||
}
|
||||
args.push('-');
|
||||
} else {
|
||||
args.push('exec');
|
||||
if (mode === 'write' || mode === 'auto') {
|
||||
args.push('--dangerously-bypass-approvals-and-sandbox');
|
||||
} else {
|
||||
args.push('--full-auto');
|
||||
}
|
||||
if (model) {
|
||||
args.push('-m', model);
|
||||
}
|
||||
if (include) {
|
||||
const dirs = include.split(',').map((d) => d.trim()).filter((d) => d);
|
||||
for (const addDir of dirs) {
|
||||
args.push('--add-dir', addDir);
|
||||
}
|
||||
}
|
||||
args.push('-');
|
||||
}
|
||||
break;
|
||||
|
||||
case 'claude':
|
||||
// Claude Code: claude -p "prompt" for non-interactive mode
|
||||
args.push('-p'); // Print mode (non-interactive)
|
||||
// Native resume: claude --resume <session-id> or --continue
|
||||
if (nativeResume?.enabled) {
|
||||
if (nativeResume.isLatest) {
|
||||
args.push('--continue');
|
||||
} else if (nativeResume.sessionId) {
|
||||
args.push('--resume', nativeResume.sessionId);
|
||||
}
|
||||
}
|
||||
if (model) {
|
||||
args.push('--model', model);
|
||||
}
|
||||
// Permission modes: write/auto → bypassPermissions, analysis → default
|
||||
if (mode === 'write' || mode === 'auto') {
|
||||
args.push('--permission-mode', 'bypassPermissions');
|
||||
} else {
|
||||
args.push('--permission-mode', 'default');
|
||||
}
|
||||
// Output format for better parsing
|
||||
args.push('--output-format', 'text');
|
||||
// Add directories
|
||||
if (include) {
|
||||
const dirs = include.split(',').map((d) => d.trim()).filter((d) => d);
|
||||
for (const addDir of dirs) {
|
||||
args.push('--add-dir', addDir);
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
errorLog('BUILD_CMD', `Unknown CLI tool: ${tool}`);
|
||||
throw new Error(`Unknown CLI tool: ${tool}`);
|
||||
}
|
||||
|
||||
debugLog('BUILD_CMD', `Command built successfully`, {
|
||||
command,
|
||||
args,
|
||||
useStdin,
|
||||
fullCommand: `${command} ${args.join(' ')}${useStdin ? ' (stdin)' : ''}`,
|
||||
});
|
||||
|
||||
return { command, args, useStdin };
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
434
ccw/src/tools/cli-prompt-builder.ts
Normal file
434
ccw/src/tools/cli-prompt-builder.ts
Normal file
@@ -0,0 +1,434 @@
|
||||
/**
|
||||
* CLI Prompt Builder
|
||||
* Prompt concatenation + multi-turn formatting helpers
|
||||
*/
|
||||
|
||||
import type { ConversationRecord, ConversationTurn } from './cli-executor-state.js';
|
||||
|
||||
// Prompt concatenation format types
export type PromptFormat = 'plain' | 'yaml' | 'json';

/**
 * Merge multiple conversations into a unified context
 * Returns merged turns sorted by timestamp with source tracking
 */
export interface MergedTurn extends ConversationTurn {
  source_id: string; // Original conversation ID
}

/** Aggregate produced by mergeConversations(). */
export interface MergeResult {
  mergedTurns: MergedTurn[];                 // all turns, timestamp-sorted and renumbered
  sourceConversations: ConversationRecord[]; // inputs, in the order supplied
  totalDuration: number;                     // sum of duration_ms over merged turns
}
|
||||
|
||||
export function mergeConversations(conversations: ConversationRecord[]): MergeResult {
|
||||
const mergedTurns: MergedTurn[] = [];
|
||||
|
||||
// Collect all turns with source tracking
|
||||
for (const conv of conversations) {
|
||||
for (const turn of conv.turns) {
|
||||
mergedTurns.push({
|
||||
...turn,
|
||||
source_id: conv.id
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by timestamp
|
||||
mergedTurns.sort((a, b) => new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime());
|
||||
|
||||
// Re-number turns
|
||||
mergedTurns.forEach((turn, idx) => {
|
||||
turn.turn = idx + 1;
|
||||
});
|
||||
|
||||
// Calculate total duration
|
||||
const totalDuration = mergedTurns.reduce((sum, t) => sum + t.duration_ms, 0);
|
||||
|
||||
return {
|
||||
mergedTurns,
|
||||
sourceConversations: conversations,
|
||||
totalDuration
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Build prompt from merged conversations
|
||||
*/
|
||||
export function buildMergedPrompt(
|
||||
mergeResult: MergeResult,
|
||||
newPrompt: string,
|
||||
format: PromptFormat = 'plain'
|
||||
): string {
|
||||
const concatenator = createPromptConcatenator({ format });
|
||||
|
||||
// Set metadata for merged conversations
|
||||
concatenator.setMetadata(
|
||||
'merged_sources',
|
||||
mergeResult.sourceConversations.map(c => c.id).join(', ')
|
||||
);
|
||||
|
||||
// Add all merged turns with source tracking
|
||||
for (const turn of mergeResult.mergedTurns) {
|
||||
concatenator.addFromConversationTurn(turn, turn.source_id);
|
||||
}
|
||||
|
||||
return concatenator.build(newPrompt);
|
||||
}
|
||||
|
||||
/**
 * Turn data structure for concatenation
 */
interface TurnData {
  turn: number;               // 1-based position within the concatenated sequence
  timestamp?: string;         // ISO timestamp of the originating turn, if known
  role: 'user' | 'assistant';
  content: string;            // message body (assistant content may be truncated)
  status?: string;            // execution status of an assistant turn
  duration_ms?: number;       // execution duration of an assistant turn
  source_id?: string; // For merged conversations
}

/**
 * Prompt concatenation options
 */
export interface ConcatOptions {
  format: PromptFormat;         // output encoding: 'plain' | 'yaml' | 'json'
  includeMetadata?: boolean;    // emit the metadata/context section (default true)
  includeTurnMarkers?: boolean; // emit per-turn separators in plain text (default true)
  maxOutputLength?: number; // Truncate output for context efficiency
}
|
||||
|
||||
/**
|
||||
* PromptConcatenator - Dedicated class for building multi-turn prompts
|
||||
* Supports multiple output formats: plain text, YAML, JSON
|
||||
*/
|
||||
export class PromptConcatenator {
|
||||
private turns: TurnData[] = [];
|
||||
private options: ConcatOptions;
|
||||
private metadata: Record<string, unknown> = {};
|
||||
|
||||
constructor(options: Partial<ConcatOptions> = {}) {
|
||||
this.options = {
|
||||
format: options.format || 'plain',
|
||||
includeMetadata: options.includeMetadata ?? true,
|
||||
includeTurnMarkers: options.includeTurnMarkers ?? true,
|
||||
maxOutputLength: options.maxOutputLength || 8192
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Set metadata for the conversation
|
||||
*/
|
||||
setMetadata(key: string, value: unknown): this {
|
||||
this.metadata[key] = value;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a user turn
|
||||
*/
|
||||
addUserTurn(content: string, options: Partial<Omit<TurnData, 'role' | 'content'>> = {}): this {
|
||||
this.turns.push({
|
||||
turn: this.turns.length + 1,
|
||||
role: 'user',
|
||||
content,
|
||||
...options
|
||||
});
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an assistant turn
|
||||
*/
|
||||
addAssistantTurn(content: string, options: Partial<Omit<TurnData, 'role' | 'content'>> = {}): this {
|
||||
// Truncate output if needed
|
||||
const truncatedContent = content.length > this.options.maxOutputLength!
|
||||
? content.substring(0, this.options.maxOutputLength!) + '\n... [truncated]'
|
||||
: content;
|
||||
|
||||
this.turns.push({
|
||||
turn: this.turns.length + 1,
|
||||
role: 'assistant',
|
||||
content: truncatedContent,
|
||||
...options
|
||||
});
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a conversation turn from ConversationTurn
|
||||
*/
|
||||
addFromConversationTurn(turn: ConversationTurn, sourceId?: string): this {
|
||||
this.addUserTurn(turn.prompt, {
|
||||
turn: turn.turn * 2 - 1,
|
||||
timestamp: turn.timestamp,
|
||||
source_id: sourceId
|
||||
});
|
||||
this.addAssistantTurn(turn.output.stdout || '[No output]', {
|
||||
turn: turn.turn * 2,
|
||||
timestamp: turn.timestamp,
|
||||
status: turn.status,
|
||||
duration_ms: turn.duration_ms,
|
||||
source_id: sourceId
|
||||
});
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Load turns from an existing conversation
|
||||
*/
|
||||
loadConversation(conversation: ConversationRecord): this {
|
||||
for (const turn of conversation.turns) {
|
||||
this.addFromConversationTurn(turn);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the final prompt in plain text format
|
||||
*/
|
||||
private buildPlainText(newPrompt: string): string {
|
||||
const parts: string[] = [];
|
||||
|
||||
// Metadata section
|
||||
if (this.options.includeMetadata && Object.keys(this.metadata).length > 0) {
|
||||
parts.push('=== CONTEXT ===');
|
||||
for (const [key, value] of Object.entries(this.metadata)) {
|
||||
parts.push(`${key}: ${String(value)}`);
|
||||
}
|
||||
parts.push('');
|
||||
}
|
||||
|
||||
// Conversation history
|
||||
if (this.turns.length > 0) {
|
||||
parts.push('=== CONVERSATION HISTORY ===');
|
||||
parts.push('');
|
||||
|
||||
let currentTurn = 0;
|
||||
for (let i = 0; i < this.turns.length; i += 2) {
|
||||
currentTurn++;
|
||||
const userTurn = this.turns[i];
|
||||
const assistantTurn = this.turns[i + 1];
|
||||
|
||||
if (this.options.includeTurnMarkers) {
|
||||
const sourceMarker = userTurn.source_id ? ` [${userTurn.source_id}]` : '';
|
||||
parts.push(`--- Turn ${currentTurn}${sourceMarker} ---`);
|
||||
}
|
||||
|
||||
parts.push('USER:');
|
||||
parts.push(userTurn.content);
|
||||
parts.push('');
|
||||
|
||||
if (assistantTurn) {
|
||||
parts.push('ASSISTANT:');
|
||||
parts.push(assistantTurn.content);
|
||||
parts.push('');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// New request
|
||||
parts.push('=== NEW REQUEST ===');
|
||||
parts.push('');
|
||||
parts.push(newPrompt);
|
||||
|
||||
return parts.join('\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the final prompt in YAML format
|
||||
*/
|
||||
private buildYaml(newPrompt: string): string {
|
||||
const yamlLines: string[] = [];
|
||||
|
||||
// Metadata
|
||||
if (this.options.includeMetadata && Object.keys(this.metadata).length > 0) {
|
||||
yamlLines.push('context:');
|
||||
for (const [key, value] of Object.entries(this.metadata)) {
|
||||
yamlLines.push(` ${key}: ${this.yamlValue(value)}`);
|
||||
}
|
||||
yamlLines.push('');
|
||||
}
|
||||
|
||||
// Conversation history
|
||||
if (this.turns.length > 0) {
|
||||
yamlLines.push('conversation:');
|
||||
|
||||
let currentTurn = 0;
|
||||
for (let i = 0; i < this.turns.length; i += 2) {
|
||||
currentTurn++;
|
||||
const userTurn = this.turns[i];
|
||||
const assistantTurn = this.turns[i + 1];
|
||||
|
||||
yamlLines.push(` - turn: ${currentTurn}`);
|
||||
if (userTurn.source_id) {
|
||||
yamlLines.push(` source: ${userTurn.source_id}`);
|
||||
}
|
||||
if (userTurn.timestamp) {
|
||||
yamlLines.push(` timestamp: ${userTurn.timestamp}`);
|
||||
}
|
||||
|
||||
// User message
|
||||
yamlLines.push(' user: |');
|
||||
const userLines = userTurn.content.split('\n');
|
||||
for (const line of userLines) {
|
||||
yamlLines.push(` ${line}`);
|
||||
}
|
||||
|
||||
// Assistant message
|
||||
if (assistantTurn) {
|
||||
if (assistantTurn.status) {
|
||||
yamlLines.push(` status: ${assistantTurn.status}`);
|
||||
}
|
||||
if (assistantTurn.duration_ms) {
|
||||
yamlLines.push(` duration_ms: ${assistantTurn.duration_ms}`);
|
||||
}
|
||||
yamlLines.push(' assistant: |');
|
||||
const assistantLines = assistantTurn.content.split('\n');
|
||||
for (const line of assistantLines) {
|
||||
yamlLines.push(` ${line}`);
|
||||
}
|
||||
}
|
||||
yamlLines.push('');
|
||||
}
|
||||
}
|
||||
|
||||
// New request
|
||||
yamlLines.push('new_request: |');
|
||||
const requestLines = newPrompt.split('\n');
|
||||
for (const line of requestLines) {
|
||||
yamlLines.push(` ${line}`);
|
||||
}
|
||||
|
||||
return yamlLines.join('\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the final prompt in JSON format
|
||||
*/
|
||||
private buildJson(newPrompt: string): string {
|
||||
const data: Record<string, unknown> = {};
|
||||
|
||||
// Metadata
|
||||
if (this.options.includeMetadata && Object.keys(this.metadata).length > 0) {
|
||||
data.context = this.metadata;
|
||||
}
|
||||
|
||||
// Conversation history
|
||||
if (this.turns.length > 0) {
|
||||
const conversation: Array<{
|
||||
turn: number;
|
||||
source?: string;
|
||||
timestamp?: string;
|
||||
user: string;
|
||||
assistant?: string;
|
||||
status?: string;
|
||||
duration_ms?: number;
|
||||
}> = [];
|
||||
|
||||
for (let i = 0; i < this.turns.length; i += 2) {
|
||||
const userTurn = this.turns[i];
|
||||
const assistantTurn = this.turns[i + 1];
|
||||
|
||||
const turnData: typeof conversation[0] = {
|
||||
turn: Math.ceil((i + 1) / 2),
|
||||
user: userTurn.content
|
||||
};
|
||||
|
||||
if (userTurn.source_id) turnData.source = userTurn.source_id;
|
||||
if (userTurn.timestamp) turnData.timestamp = userTurn.timestamp;
|
||||
if (assistantTurn) {
|
||||
turnData.assistant = assistantTurn.content;
|
||||
if (assistantTurn.status) turnData.status = assistantTurn.status;
|
||||
if (assistantTurn.duration_ms) turnData.duration_ms = assistantTurn.duration_ms;
|
||||
}
|
||||
|
||||
conversation.push(turnData);
|
||||
}
|
||||
|
||||
data.conversation = conversation;
|
||||
}
|
||||
|
||||
data.new_request = newPrompt;
|
||||
|
||||
return JSON.stringify(data, null, 2);
|
||||
}
|
||||
|
||||
/**
|
||||
* Helper to format YAML values
|
||||
*/
|
||||
private yamlValue(value: unknown): string {
|
||||
if (typeof value === 'string') {
|
||||
// Quote strings that might be interpreted as other types
|
||||
if (/[:\[\]{}#&*!|>'"@`]/.test(value) || value === '') {
|
||||
return `"${value.replace(/"/g, '\\"')}"`;
|
||||
}
|
||||
return value;
|
||||
}
|
||||
if (typeof value === 'number' || typeof value === 'boolean') {
|
||||
return String(value);
|
||||
}
|
||||
if (value === null || value === undefined) {
|
||||
return 'null';
|
||||
}
|
||||
return JSON.stringify(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build the final prompt string
|
||||
*/
|
||||
build(newPrompt: string): string {
|
||||
switch (this.options.format) {
|
||||
case 'yaml':
|
||||
return this.buildYaml(newPrompt);
|
||||
case 'json':
|
||||
return this.buildJson(newPrompt);
|
||||
case 'plain':
|
||||
default:
|
||||
return this.buildPlainText(newPrompt);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset the concatenator for reuse
|
||||
*/
|
||||
reset(): this {
|
||||
this.turns = [];
|
||||
this.metadata = {};
|
||||
return this;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a prompt concatenator with specified options
|
||||
*/
|
||||
export function createPromptConcatenator(options?: Partial<ConcatOptions>): PromptConcatenator {
|
||||
return new PromptConcatenator(options);
|
||||
}
|
||||
|
||||
/**
|
||||
* Quick helper to build a multi-turn prompt in any format
|
||||
*/
|
||||
export function buildPrompt(
|
||||
conversation: ConversationRecord,
|
||||
newPrompt: string,
|
||||
format: PromptFormat = 'plain'
|
||||
): string {
|
||||
return createPromptConcatenator({ format })
|
||||
.loadConversation(conversation)
|
||||
.build(newPrompt);
|
||||
}
|
||||
|
||||
/**
|
||||
* Build multi-turn prompt with full conversation history
|
||||
* Uses the PromptConcatenator with plain text format by default
|
||||
*/
|
||||
export function buildMultiTurnPrompt(
|
||||
conversation: ConversationRecord,
|
||||
newPrompt: string,
|
||||
format: PromptFormat = 'plain'
|
||||
): string {
|
||||
return buildPrompt(conversation, newPrompt, format);
|
||||
}
|
||||
@@ -17,6 +17,7 @@ import { join, dirname } from 'path';
|
||||
import { homedir } from 'os';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { getSystemPython } from '../utils/python-utils.js';
|
||||
import { EXEC_TIMEOUTS } from '../utils/exec-constants.js';
|
||||
|
||||
// Get directory of this module
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
@@ -340,7 +341,7 @@ async function ensureLiteLLMEmbedderReady(): Promise<BootstrapResult> {
|
||||
for (const localPath of possiblePaths) {
|
||||
if (existsSync(join(localPath, 'pyproject.toml'))) {
|
||||
console.log(`[CodexLens] Installing ccw-litellm from local path: ${localPath}`);
|
||||
execSync(`"${pipPath}" install -e "${localPath}"`, { stdio: 'inherit' });
|
||||
execSync(`"${pipPath}" install -e "${localPath}"`, { stdio: 'inherit', timeout: EXEC_TIMEOUTS.PACKAGE_INSTALL });
|
||||
installed = true;
|
||||
break;
|
||||
}
|
||||
@@ -348,7 +349,7 @@ async function ensureLiteLLMEmbedderReady(): Promise<BootstrapResult> {
|
||||
|
||||
if (!installed) {
|
||||
console.log('[CodexLens] Installing ccw-litellm from PyPI...');
|
||||
execSync(`"${pipPath}" install ccw-litellm`, { stdio: 'inherit' });
|
||||
execSync(`"${pipPath}" install ccw-litellm`, { stdio: 'inherit', timeout: EXEC_TIMEOUTS.PACKAGE_INSTALL });
|
||||
}
|
||||
|
||||
return { success: true };
|
||||
@@ -426,11 +427,11 @@ async function detectGpuSupport(): Promise<{ mode: GpuMode; available: GpuMode[]
|
||||
// Check for NVIDIA GPU (CUDA)
|
||||
try {
|
||||
if (process.platform === 'win32') {
|
||||
execSync('nvidia-smi', { stdio: 'pipe' });
|
||||
execSync('nvidia-smi', { stdio: 'pipe', timeout: EXEC_TIMEOUTS.SYSTEM_INFO });
|
||||
available.push('cuda');
|
||||
detectedInfo = 'NVIDIA GPU detected (CUDA available)';
|
||||
} else {
|
||||
execSync('which nvidia-smi', { stdio: 'pipe' });
|
||||
execSync('which nvidia-smi', { stdio: 'pipe', timeout: EXEC_TIMEOUTS.SYSTEM_INFO });
|
||||
available.push('cuda');
|
||||
detectedInfo = 'NVIDIA GPU detected (CUDA available)';
|
||||
}
|
||||
@@ -503,7 +504,7 @@ async function installSemantic(gpuMode: GpuMode = 'cpu'): Promise<BootstrapResul
|
||||
|
||||
for (const pkg of onnxVariants) {
|
||||
try {
|
||||
execSync(`"${pipPath}" uninstall ${pkg} -y`, { stdio: 'pipe' });
|
||||
execSync(`"${pipPath}" uninstall ${pkg} -y`, { stdio: 'pipe', timeout: EXEC_TIMEOUTS.PACKAGE_INSTALL });
|
||||
console.log(`[CodexLens] Removed ${pkg}`);
|
||||
} catch {
|
||||
// Package not installed, ignore
|
||||
@@ -587,7 +588,7 @@ async function installSemantic(gpuMode: GpuMode = 'cpu'): Promise<BootstrapResul
|
||||
if (gpuMode !== 'cpu') {
|
||||
try {
|
||||
console.log(`[CodexLens] Reinstalling ${onnxPackage} to ensure GPU provider works...`);
|
||||
execSync(`"${pipPath}" install --force-reinstall ${onnxPackage}`, { stdio: 'pipe', timeout: 300000 });
|
||||
execSync(`"${pipPath}" install --force-reinstall ${onnxPackage}`, { stdio: 'pipe', timeout: EXEC_TIMEOUTS.PACKAGE_INSTALL });
|
||||
console.log(`[CodexLens] ${onnxPackage} reinstalled successfully`);
|
||||
} catch (e) {
|
||||
console.warn(`[CodexLens] Warning: Failed to reinstall ${onnxPackage}: ${(e as Error).message}`);
|
||||
@@ -626,7 +627,7 @@ async function bootstrapVenv(): Promise<BootstrapResult> {
|
||||
try {
|
||||
console.log('[CodexLens] Creating virtual environment...');
|
||||
const pythonCmd = getSystemPython();
|
||||
execSync(`${pythonCmd} -m venv "${CODEXLENS_VENV}"`, { stdio: 'inherit' });
|
||||
execSync(`${pythonCmd} -m venv "${CODEXLENS_VENV}"`, { stdio: 'inherit', timeout: EXEC_TIMEOUTS.PROCESS_SPAWN });
|
||||
} catch (err) {
|
||||
return { success: false, error: `Failed to create venv: ${(err as Error).message}` };
|
||||
}
|
||||
@@ -651,7 +652,7 @@ async function bootstrapVenv(): Promise<BootstrapResult> {
|
||||
for (const localPath of possiblePaths) {
|
||||
if (existsSync(join(localPath, 'pyproject.toml'))) {
|
||||
console.log(`[CodexLens] Installing from local path: ${localPath}`);
|
||||
execSync(`"${pipPath}" install -e "${localPath}"`, { stdio: 'inherit' });
|
||||
execSync(`"${pipPath}" install -e "${localPath}"`, { stdio: 'inherit', timeout: EXEC_TIMEOUTS.PACKAGE_INSTALL });
|
||||
installed = true;
|
||||
break;
|
||||
}
|
||||
@@ -659,7 +660,7 @@ async function bootstrapVenv(): Promise<BootstrapResult> {
|
||||
|
||||
if (!installed) {
|
||||
console.log('[CodexLens] Installing from PyPI...');
|
||||
execSync(`"${pipPath}" install codexlens`, { stdio: 'inherit' });
|
||||
execSync(`"${pipPath}" install codexlens`, { stdio: 'inherit', timeout: EXEC_TIMEOUTS.PACKAGE_INSTALL });
|
||||
}
|
||||
|
||||
// Clear cache after successful installation
|
||||
@@ -1368,7 +1369,7 @@ async function uninstallCodexLens(): Promise<BootstrapResult> {
|
||||
const { execSync } = await import('child_process');
|
||||
try {
|
||||
// Kill any python processes from our venv that might be holding file locks
|
||||
execSync(`taskkill /F /IM python.exe /FI "MODULES eq sqlite3" 2>nul`, { stdio: 'ignore' });
|
||||
execSync(`taskkill /F /IM python.exe /FI "MODULES eq sqlite3" 2>nul`, { stdio: 'ignore', timeout: EXEC_TIMEOUTS.SYSTEM_INFO });
|
||||
} catch {
|
||||
// Ignore errors - no processes to kill
|
||||
}
|
||||
@@ -1397,7 +1398,7 @@ async function uninstallCodexLens(): Promise<BootstrapResult> {
|
||||
try {
|
||||
const { execSync } = await import('child_process');
|
||||
// Try to close handles on the specific file
|
||||
execSync(`handle -c ${err.path} -y 2>nul`, { stdio: 'ignore' });
|
||||
execSync(`handle -c ${err.path} -y 2>nul`, { stdio: 'ignore', timeout: EXEC_TIMEOUTS.SYSTEM_INFO });
|
||||
} catch {
|
||||
// handle.exe may not be installed, ignore
|
||||
}
|
||||
@@ -1454,7 +1455,7 @@ function cancelIndexing(): { success: boolean; message?: string; error?: string
|
||||
// On Windows, use taskkill to kill the process tree
|
||||
const { execSync } = require('child_process');
|
||||
try {
|
||||
execSync(`taskkill /pid ${currentIndexingProcess.pid} /T /F`, { stdio: 'ignore' });
|
||||
execSync(`taskkill /pid ${currentIndexingProcess.pid} /T /F`, { stdio: 'ignore', timeout: EXEC_TIMEOUTS.SYSTEM_INFO });
|
||||
} catch {
|
||||
// Process may have already exited
|
||||
}
|
||||
|
||||
@@ -8,6 +8,15 @@ import type { ToolSchema, ToolResult } from '../types/tool.js';
|
||||
import { readdirSync, statSync, existsSync } from 'fs';
|
||||
import { join, resolve, dirname, extname, relative } from 'path';
|
||||
import { execSync } from 'child_process';
|
||||
import { EXEC_TIMEOUTS } from '../utils/exec-constants.js';
|
||||
|
||||
function isExecTimeoutError(error: unknown): boolean {
|
||||
const err = error as { code?: unknown; errno?: unknown; message?: unknown } | null;
|
||||
const code = err?.code ?? err?.errno;
|
||||
if (code === 'ETIMEDOUT') return true;
|
||||
const message = typeof err?.message === 'string' ? err.message : '';
|
||||
return message.includes('ETIMEDOUT');
|
||||
}
|
||||
|
||||
// Source file extensions to track
|
||||
const SOURCE_EXTENSIONS = [
|
||||
@@ -53,9 +62,12 @@ interface ToolOutput {
|
||||
*/
|
||||
function isGitRepo(basePath: string): boolean {
|
||||
try {
|
||||
execSync('git rev-parse --git-dir', { cwd: basePath, stdio: 'pipe' });
|
||||
execSync('git rev-parse --git-dir', { cwd: basePath, stdio: 'pipe', timeout: EXEC_TIMEOUTS.GIT_QUICK });
|
||||
return true;
|
||||
} catch (e) {
|
||||
} catch (e: unknown) {
|
||||
if (isExecTimeoutError(e)) {
|
||||
console.warn(`[detect_changed_modules] git rev-parse timed out after ${EXEC_TIMEOUTS.GIT_QUICK}ms`);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -69,13 +81,15 @@ function getGitChangedFiles(basePath: string): string[] {
|
||||
let output = execSync('git diff --name-only HEAD 2>/dev/null', {
|
||||
cwd: basePath,
|
||||
encoding: 'utf8',
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
timeout: EXEC_TIMEOUTS.GIT_DIFF,
|
||||
}).trim();
|
||||
|
||||
const cachedOutput = execSync('git diff --name-only --cached 2>/dev/null', {
|
||||
cwd: basePath,
|
||||
encoding: 'utf8',
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
timeout: EXEC_TIMEOUTS.GIT_DIFF,
|
||||
}).trim();
|
||||
|
||||
if (cachedOutput) {
|
||||
@@ -87,12 +101,16 @@ function getGitChangedFiles(basePath: string): string[] {
|
||||
output = execSync('git diff --name-only HEAD~1 HEAD 2>/dev/null', {
|
||||
cwd: basePath,
|
||||
encoding: 'utf8',
|
||||
stdio: ['pipe', 'pipe', 'pipe']
|
||||
stdio: ['pipe', 'pipe', 'pipe'],
|
||||
timeout: EXEC_TIMEOUTS.GIT_DIFF,
|
||||
}).trim();
|
||||
}
|
||||
|
||||
return output ? output.split('\n').filter(f => f.trim()) : [];
|
||||
} catch (e) {
|
||||
} catch (e: unknown) {
|
||||
if (isExecTimeoutError(e)) {
|
||||
console.warn(`[detect_changed_modules] git diff timed out after ${EXEC_TIMEOUTS.GIT_DIFF}ms`);
|
||||
}
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,6 +26,7 @@ import {
|
||||
} from './codex-lens.js';
|
||||
import type { ProgressInfo } from './codex-lens.js';
|
||||
import { getProjectRoot } from '../utils/path-validator.js';
|
||||
import { EXEC_TIMEOUTS } from '../utils/exec-constants.js';
|
||||
|
||||
// Timing utilities for performance analysis
|
||||
const TIMING_ENABLED = process.env.SMART_SEARCH_TIMING === '1' || process.env.DEBUG?.includes('timing');
|
||||
@@ -603,7 +604,7 @@ function checkToolAvailability(toolName: string): boolean {
|
||||
try {
|
||||
const isWindows = process.platform === 'win32';
|
||||
const command = isWindows ? 'where' : 'which';
|
||||
execSync(`${command} ${toolName}`, { stdio: 'ignore' });
|
||||
execSync(`${command} ${toolName}`, { stdio: 'ignore', timeout: EXEC_TIMEOUTS.SYSTEM_INFO });
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
|
||||
24
ccw/src/utils/exec-constants.ts
Normal file
24
ccw/src/utils/exec-constants.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
/**
|
||||
* Centralized timeout defaults for synchronous process execution.
|
||||
*
|
||||
* `execSync` blocks the Node.js event loop. Always provide a timeout so callers
|
||||
* fail fast instead of hanging indefinitely when external tools stall.
|
||||
*
|
||||
* Guidance:
|
||||
* - Use `GIT_QUICK` for lightweight git queries (rev-parse, status).
|
||||
* - Use `GIT_DIFF` for diff-based queries.
|
||||
* - Use `GIT_LOG` for log/history queries.
|
||||
* - Use `PYTHON_VERSION` for `python --version` style probes.
|
||||
* - Use `SYSTEM_INFO` for OS/hardware capability probes (wmic, nvidia-smi, which/where).
|
||||
* - Use `PROCESS_SPAWN` for short-lived spawn-style operations.
|
||||
* - Use `PACKAGE_INSTALL` for package manager operations that may take minutes.
|
||||
*/
|
||||
export const EXEC_TIMEOUTS = {
|
||||
GIT_QUICK: 5_000,
|
||||
GIT_DIFF: 10_000,
|
||||
GIT_LOG: 15_000,
|
||||
PYTHON_VERSION: 5_000,
|
||||
SYSTEM_INFO: 10_000,
|
||||
PROCESS_SPAWN: 30_000,
|
||||
PACKAGE_INSTALL: 300_000,
|
||||
} as const;
|
||||
@@ -69,6 +69,21 @@ export function validatePath(inputPath: string, options: ValidatePathOptions = {
|
||||
return { valid: false, path: null, error: `Invalid path: ${message}` };
|
||||
}
|
||||
|
||||
// Check if within base directory when specified (pre-symlink resolution)
|
||||
const resolvedBase = baseDir ? resolvePath(baseDir) : null;
|
||||
if (resolvedBase) {
|
||||
const relativePath = relative(resolvedBase, resolvedPath);
|
||||
|
||||
// Path traversal detection: relative path should not start with '..'
|
||||
if (relativePath.startsWith('..') || isAbsolute(relativePath)) {
|
||||
return {
|
||||
valid: false,
|
||||
path: null,
|
||||
error: `Path must be within ${resolvedBase}`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Check if path exists when required
|
||||
if (mustExist && !existsSync(resolvedPath)) {
|
||||
return { valid: false, path: null, error: `Path does not exist: ${resolvedPath}` };
|
||||
@@ -83,11 +98,30 @@ export function validatePath(inputPath: string, options: ValidatePathOptions = {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
return { valid: false, path: null, error: `Cannot resolve path: ${message}` };
|
||||
}
|
||||
} else if (resolvedBase) {
|
||||
// For non-existent paths, resolve the nearest existing ancestor to prevent symlink-based escapes
|
||||
// (e.g., baseDir/link/newfile where baseDir/link is a symlink to a disallowed location).
|
||||
let existingPath = resolvedPath;
|
||||
while (!existsSync(existingPath)) {
|
||||
const parent = resolve(existingPath, '..');
|
||||
if (parent === existingPath) break;
|
||||
existingPath = parent;
|
||||
}
|
||||
|
||||
if (existsSync(existingPath)) {
|
||||
try {
|
||||
const realExisting = realpathSync(existingPath);
|
||||
const remainder = relative(existingPath, resolvedPath);
|
||||
realPath = remainder && remainder !== '.' ? join(realExisting, remainder) : realExisting;
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : String(err);
|
||||
return { valid: false, path: null, error: `Cannot resolve path: ${message}` };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if within base directory when specified
|
||||
if (baseDir) {
|
||||
const resolvedBase = resolvePath(baseDir);
|
||||
// Check if within base directory when specified (post-symlink resolution)
|
||||
if (resolvedBase) {
|
||||
const relativePath = relative(resolvedBase, realPath);
|
||||
|
||||
// Path traversal detection: relative path should not start with '..'
|
||||
|
||||
@@ -7,7 +7,7 @@
|
||||
* Inspired by MCP filesystem server's security model.
|
||||
*/
|
||||
|
||||
import { resolve, isAbsolute, normalize, relative } from 'path';
|
||||
import { resolve, isAbsolute, normalize, relative, sep } from 'path';
|
||||
import { realpath, access } from 'fs/promises';
|
||||
import { constants } from 'fs';
|
||||
|
||||
@@ -42,6 +42,27 @@ export function normalizePath(p: string): string {
|
||||
return normalize(p).replace(/\\/g, '/');
|
||||
}
|
||||
|
||||
function canonicalizeForComparison(p: string): string {
|
||||
const base = getProjectRoot();
|
||||
const absolute = isAbsolute(p) ? p : resolve(base, p);
|
||||
let canonical = normalize(absolute);
|
||||
|
||||
// Remove trailing separators (except drive roots like C:\ and posix root /)
|
||||
canonical = canonical.replace(/[\\/]+$/, '');
|
||||
if (/^[a-zA-Z]:$/.test(canonical)) {
|
||||
canonical += sep;
|
||||
} else if (canonical === '') {
|
||||
canonical = sep;
|
||||
}
|
||||
|
||||
// Windows paths are case-insensitive.
|
||||
if (process.platform === 'win32') {
|
||||
canonical = canonical.toLowerCase();
|
||||
}
|
||||
|
||||
return canonical;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if path is within allowed directories
|
||||
*/
|
||||
@@ -49,12 +70,13 @@ export function isPathWithinAllowedDirectories(
|
||||
targetPath: string,
|
||||
allowedDirectories: string[]
|
||||
): boolean {
|
||||
const normalizedTarget = normalizePath(targetPath);
|
||||
const canonicalTarget = canonicalizeForComparison(targetPath);
|
||||
return allowedDirectories.some(dir => {
|
||||
const normalizedDir = normalizePath(dir);
|
||||
// Check if path equals or starts with allowed directory
|
||||
return normalizedTarget === normalizedDir ||
|
||||
normalizedTarget.startsWith(normalizedDir + '/');
|
||||
const canonicalDir = canonicalizeForComparison(dir);
|
||||
if (canonicalTarget === canonicalDir) return true;
|
||||
|
||||
const boundary = canonicalDir.endsWith(sep) ? canonicalDir : canonicalDir + sep;
|
||||
return canonicalTarget.startsWith(boundary);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -4,6 +4,15 @@
|
||||
*/
|
||||
|
||||
import { execSync } from 'child_process';
|
||||
import { EXEC_TIMEOUTS } from './exec-constants.js';
|
||||
|
||||
function isExecTimeoutError(error: unknown): boolean {
|
||||
const err = error as { code?: unknown; errno?: unknown; message?: unknown } | null;
|
||||
const code = err?.code ?? err?.errno;
|
||||
if (code === 'ETIMEDOUT') return true;
|
||||
const message = typeof err?.message === 'string' ? err.message : '';
|
||||
return message.includes('ETIMEDOUT');
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse Python version string to major.minor numbers
|
||||
@@ -40,7 +49,7 @@ export function getSystemPython(): string {
|
||||
const customPython = process.env.CCW_PYTHON;
|
||||
if (customPython) {
|
||||
try {
|
||||
const version = execSync(`"${customPython}" --version 2>&1`, { encoding: 'utf8' });
|
||||
const version = execSync(`"${customPython}" --version 2>&1`, { encoding: 'utf8', timeout: EXEC_TIMEOUTS.PYTHON_VERSION });
|
||||
if (version.includes('Python 3')) {
|
||||
const parsed = parsePythonVersion(version);
|
||||
if (parsed && !isPythonVersionCompatible(parsed.major, parsed.minor)) {
|
||||
@@ -48,8 +57,12 @@ export function getSystemPython(): string {
|
||||
}
|
||||
return `"${customPython}"`;
|
||||
}
|
||||
} catch {
|
||||
console.warn(`[Python] Warning: CCW_PYTHON="${customPython}" is not a valid Python executable, falling back to system Python`);
|
||||
} catch (err: unknown) {
|
||||
if (isExecTimeoutError(err)) {
|
||||
console.warn(`[Python] Warning: CCW_PYTHON version check timed out after ${EXEC_TIMEOUTS.PYTHON_VERSION}ms, falling back to system Python`);
|
||||
} else {
|
||||
console.warn(`[Python] Warning: CCW_PYTHON="${customPython}" is not a valid Python executable, falling back to system Python`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,12 +71,15 @@ export function getSystemPython(): string {
|
||||
const compatibleVersions = ['3.12', '3.11', '3.10', '3.9'];
|
||||
for (const ver of compatibleVersions) {
|
||||
try {
|
||||
const version = execSync(`py -${ver} --version 2>&1`, { encoding: 'utf8' });
|
||||
const version = execSync(`py -${ver} --version 2>&1`, { encoding: 'utf8', timeout: EXEC_TIMEOUTS.PYTHON_VERSION });
|
||||
if (version.includes(`Python ${ver}`)) {
|
||||
console.log(`[Python] Found compatible Python ${ver} via py launcher`);
|
||||
return `py -${ver}`;
|
||||
}
|
||||
} catch {
|
||||
} catch (err: unknown) {
|
||||
if (isExecTimeoutError(err)) {
|
||||
console.warn(`[Python] Warning: py -${ver} version check timed out after ${EXEC_TIMEOUTS.PYTHON_VERSION}ms`);
|
||||
}
|
||||
// Version not installed, try next
|
||||
}
|
||||
}
|
||||
@@ -75,7 +91,7 @@ export function getSystemPython(): string {
|
||||
|
||||
for (const cmd of commands) {
|
||||
try {
|
||||
const version = execSync(`${cmd} --version 2>&1`, { encoding: 'utf8' });
|
||||
const version = execSync(`${cmd} --version 2>&1`, { encoding: 'utf8', timeout: EXEC_TIMEOUTS.PYTHON_VERSION });
|
||||
if (version.includes('Python 3')) {
|
||||
const parsed = parsePythonVersion(version);
|
||||
if (parsed) {
|
||||
@@ -90,7 +106,10 @@ export function getSystemPython(): string {
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
} catch (err: unknown) {
|
||||
if (isExecTimeoutError(err)) {
|
||||
console.warn(`[Python] Warning: ${cmd} --version timed out after ${EXEC_TIMEOUTS.PYTHON_VERSION}ms`);
|
||||
}
|
||||
// Try next command
|
||||
}
|
||||
}
|
||||
|
||||
26
ccw/src/utils/shell-escape.ts
Normal file
26
ccw/src/utils/shell-escape.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
/**
|
||||
* Windows cmd.exe argument escaping for spawn({ shell: true }).
|
||||
*
|
||||
* This utility escapes cmd.exe metacharacters using caret (^) so that user
|
||||
* controlled input cannot inject additional commands.
|
||||
*/
|
||||
|
||||
const WINDOWS_METACHARS = /[&|<>()%!"]/g;
|
||||
|
||||
export function escapeWindowsArg(arg: string): string {
|
||||
if (arg === '') return '""';
|
||||
|
||||
// Escape caret first to avoid double-escaping when prefixing other metachars.
|
||||
let escaped = arg.replace(/\^/g, '^^');
|
||||
|
||||
// Escape cmd.exe metacharacters with caret.
|
||||
escaped = escaped.replace(WINDOWS_METACHARS, '^$&');
|
||||
|
||||
// Wrap whitespace-containing args in double quotes.
|
||||
if (/\s/.test(escaped)) {
|
||||
escaped = `"${escaped}"`;
|
||||
}
|
||||
|
||||
return escaped;
|
||||
}
|
||||
|
||||
148
ccw/tests/auth-routes.test.ts
Normal file
148
ccw/tests/auth-routes.test.ts
Normal file
@@ -0,0 +1,148 @@
|
||||
/**
|
||||
* Unit tests for auth routes (ccw/dist/core/routes/auth-routes.js).
|
||||
*/
|
||||
|
||||
import { afterEach, before, describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
|
||||
type JsonResponse = {
|
||||
status: number;
|
||||
json: any;
|
||||
text: string;
|
||||
headers: http.IncomingHttpHeaders;
|
||||
};
|
||||
|
||||
async function requestJson(baseUrl: string, method: string, reqPath: string, headers?: Record<string, string>): Promise<JsonResponse> {
|
||||
const url = new URL(reqPath, baseUrl);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = http.request(
|
||||
url,
|
||||
{
|
||||
method,
|
||||
headers: { Accept: 'application/json', ...(headers ?? {}) },
|
||||
},
|
||||
(res) => {
|
||||
let responseBody = '';
|
||||
res.on('data', (chunk) => {
|
||||
responseBody += chunk.toString();
|
||||
});
|
||||
res.on('end', () => {
|
||||
let json: any = null;
|
||||
try {
|
||||
json = responseBody ? JSON.parse(responseBody) : null;
|
||||
} catch {
|
||||
json = null;
|
||||
}
|
||||
resolve({ status: res.statusCode || 0, json, text: responseBody, headers: res.headers });
|
||||
});
|
||||
},
|
||||
);
|
||||
req.on('error', reject);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
function cookiePairsFromSetCookie(setCookie: string | string[] | undefined): string {
|
||||
if (!setCookie) return '';
|
||||
const items = Array.isArray(setCookie) ? setCookie : [setCookie];
|
||||
const pairs: string[] = [];
|
||||
for (const item of items) {
|
||||
const pair = item.split(';')[0]?.trim();
|
||||
if (pair) pairs.push(pair);
|
||||
}
|
||||
return pairs.join('; ');
|
||||
}
|
||||
|
||||
async function createServer(): Promise<{ server: http.Server; baseUrl: string }> {
|
||||
const server = http.createServer(async (req, res) => {
|
||||
const url = new URL(req.url || '/', 'http://localhost');
|
||||
const pathname = url.pathname;
|
||||
|
||||
const ctx = {
|
||||
pathname,
|
||||
url,
|
||||
req,
|
||||
res,
|
||||
initialPath: process.cwd(),
|
||||
handlePostRequest() {},
|
||||
broadcastToClients() {},
|
||||
};
|
||||
|
||||
try {
|
||||
const handled = await authRoutes.handleAuthRoutes(ctx);
|
||||
if (!handled) {
|
||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Not Found' }));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
server.listen(0, '127.0.0.1', () => resolve());
|
||||
server.on('error', reject);
|
||||
});
|
||||
|
||||
const address = server.address();
|
||||
if (!address || typeof address === 'string') throw new Error('Expected server to listen on a TCP port');
|
||||
return { server, baseUrl: `http://127.0.0.1:${address.port}` };
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let authRoutes: any;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let csrfManager: any;
|
||||
|
||||
describe('auth routes: csrf-token endpoint', async () => {
|
||||
before(async () => {
|
||||
authRoutes = await import(new URL('../dist/core/routes/auth-routes.js', import.meta.url).href);
|
||||
csrfManager = await import(new URL('../dist/core/auth/csrf-manager.js', import.meta.url).href);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
csrfManager.resetCsrfTokenManager();
|
||||
});
|
||||
|
||||
it('GET /api/csrf-token returns token in body, header, and cookie', async () => {
|
||||
const { server, baseUrl } = await createServer();
|
||||
try {
|
||||
const res = await requestJson(baseUrl, 'GET', '/api/csrf-token');
|
||||
assert.equal(res.status, 200);
|
||||
assert.ok(res.json?.csrfToken);
|
||||
|
||||
const token = String(res.json.csrfToken);
|
||||
assert.match(token, /^[a-f0-9]{64}$/);
|
||||
assert.equal(res.headers['x-csrf-token'], token);
|
||||
|
||||
const setCookie = res.headers['set-cookie'];
|
||||
const cookies = Array.isArray(setCookie) ? setCookie.join('\n') : String(setCookie || '');
|
||||
assert.ok(cookies.includes('XSRF-TOKEN='));
|
||||
assert.ok(cookies.includes('HttpOnly'));
|
||||
assert.ok(cookies.includes('SameSite=Strict'));
|
||||
assert.ok(cookies.includes(token));
|
||||
} finally {
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
}
|
||||
});
|
||||
|
||||
it('GET /api/csrf-token returns a new token per request (same session)', async () => {
|
||||
const { server, baseUrl } = await createServer();
|
||||
try {
|
||||
const first = await requestJson(baseUrl, 'GET', '/api/csrf-token');
|
||||
assert.equal(first.status, 200);
|
||||
const cookieHeader = cookiePairsFromSetCookie(first.headers['set-cookie']);
|
||||
assert.ok(cookieHeader.includes('ccw_session_id='));
|
||||
|
||||
const second = await requestJson(baseUrl, 'GET', '/api/csrf-token', { Cookie: cookieHeader });
|
||||
assert.equal(second.status, 200);
|
||||
|
||||
assert.notEqual(first.json.csrfToken, second.json.csrfToken);
|
||||
} finally {
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -10,9 +10,10 @@
|
||||
import { after, afterEach, before, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { existsSync, mkdtempSync, mkdirSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import inquirer from 'inquirer';
|
||||
|
||||
const TEST_CCW_HOME = mkdtempSync(join(tmpdir(), 'ccw-cli-command-'));
|
||||
process.env.CCW_DATA_DIR = TEST_CCW_HOME;
|
||||
@@ -20,6 +21,7 @@ process.env.CCW_DATA_DIR = TEST_CCW_HOME;
|
||||
const cliCommandPath = new URL('../dist/commands/cli.js', import.meta.url).href;
|
||||
const cliExecutorPath = new URL('../dist/tools/cli-executor.js', import.meta.url).href;
|
||||
const historyStorePath = new URL('../dist/tools/cli-history-store.js', import.meta.url).href;
|
||||
const storageManagerPath = new URL('../dist/tools/storage-manager.js', import.meta.url).href;
|
||||
|
||||
function stubHttpRequest(): void {
|
||||
mock.method(http, 'request', () => {
|
||||
@@ -50,11 +52,14 @@ describe('cli command module', async () => {
|
||||
let cliExecutorModule: any;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let historyStoreModule: any;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let storageManagerModule: any;
|
||||
|
||||
before(async () => {
|
||||
cliModule = await import(cliCommandPath);
|
||||
cliExecutorModule = await import(cliExecutorPath);
|
||||
historyStoreModule = await import(historyStorePath);
|
||||
storageManagerModule = await import(storageManagerPath);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
@@ -112,6 +117,117 @@ describe('cli command module', async () => {
|
||||
assert.deepEqual(exitCodes, [0, 0, 0]);
|
||||
});
|
||||
|
||||
it('prints a --file tip when a multi-line prompt is provided via --prompt', async () => {
|
||||
stubHttpRequest();
|
||||
|
||||
const logs: string[] = [];
|
||||
mock.method(console, 'log', (...args: any[]) => {
|
||||
logs.push(args.map(String).join(' '));
|
||||
});
|
||||
mock.method(console, 'error', (...args: any[]) => {
|
||||
logs.push(args.map(String).join(' '));
|
||||
});
|
||||
|
||||
mock.method(cliExecutorModule.cliExecutorTool, 'execute', async () => {
|
||||
return {
|
||||
success: true,
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
execution: { id: 'EXEC-ML', duration_ms: 1, status: 'success' },
|
||||
conversation: { turn_count: 1, total_duration_ms: 1 },
|
||||
};
|
||||
});
|
||||
|
||||
const exitCodes: Array<number | undefined> = [];
|
||||
mock.method(process as any, 'exit', (code?: number) => {
|
||||
exitCodes.push(code);
|
||||
});
|
||||
|
||||
await cliModule.cliCommand('exec', [], { prompt: 'line1\nline2\nline3\nline4', tool: 'gemini', stream: true });
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
assert.ok(logs.some((l) => l.includes('Tip: Use --file option to avoid shell escaping issues with multi-line prompts')));
|
||||
assert.ok(logs.some((l) => l.includes('Example: ccw cli -f prompt.txt --tool gemini')));
|
||||
assert.deepEqual(exitCodes, [0]);
|
||||
});
|
||||
|
||||
it('does not print the --file tip for single-line prompts', async () => {
|
||||
stubHttpRequest();
|
||||
|
||||
const logs: string[] = [];
|
||||
mock.method(console, 'log', (...args: any[]) => {
|
||||
logs.push(args.map(String).join(' '));
|
||||
});
|
||||
mock.method(console, 'error', (...args: any[]) => {
|
||||
logs.push(args.map(String).join(' '));
|
||||
});
|
||||
|
||||
mock.method(cliExecutorModule.cliExecutorTool, 'execute', async () => {
|
||||
return {
|
||||
success: true,
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
execution: { id: 'EXEC-SL', duration_ms: 1, status: 'success' },
|
||||
conversation: { turn_count: 1, total_duration_ms: 1 },
|
||||
};
|
||||
});
|
||||
|
||||
const exitCodes: Array<number | undefined> = [];
|
||||
mock.method(process as any, 'exit', (code?: number) => {
|
||||
exitCodes.push(code);
|
||||
});
|
||||
|
||||
await cliModule.cliCommand('exec', [], { prompt: 'Hello', tool: 'gemini', stream: true });
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
assert.equal(
|
||||
logs.some((l) => l.includes('Tip: Use --file option to avoid shell escaping issues with multi-line prompts')),
|
||||
false,
|
||||
);
|
||||
assert.deepEqual(exitCodes, [0]);
|
||||
});
|
||||
|
||||
it('prints full output hint immediately after stderr truncation (no troubleshooting duplicate)', async () => {
|
||||
stubHttpRequest();
|
||||
|
||||
const logs: string[] = [];
|
||||
mock.method(console, 'log', (...args: any[]) => {
|
||||
logs.push(args.map(String).join(' '));
|
||||
});
|
||||
mock.method(console, 'error', (...args: any[]) => {
|
||||
logs.push(args.map(String).join(' '));
|
||||
});
|
||||
|
||||
mock.method(cliExecutorModule.cliExecutorTool, 'execute', async () => {
|
||||
const stderr = Array.from({ length: 31 }, (_, i) => `stderr-line-${i}`).join('\n');
|
||||
return {
|
||||
success: false,
|
||||
stdout: '',
|
||||
stderr,
|
||||
execution: { id: 'EXEC-ERR', duration_ms: 12, status: 'error', exit_code: 1 },
|
||||
conversation: { turn_count: 1, total_duration_ms: 12 },
|
||||
};
|
||||
});
|
||||
|
||||
const exitCodes: Array<number | undefined> = [];
|
||||
mock.method(process as any, 'exit', (code?: number) => {
|
||||
exitCodes.push(code);
|
||||
});
|
||||
|
||||
await cliModule.cliCommand('exec', [], { prompt: 'Hello', tool: 'gemini', stream: true });
|
||||
await new Promise((resolve) => setTimeout(resolve, 200));
|
||||
|
||||
const truncationIndex = logs.findIndex((l) => l.includes('... 1 more lines'));
|
||||
const hintIndex = logs.findIndex((l) => l.includes('💡 View full output: ccw cli output EXEC-ERR'));
|
||||
assert.ok(truncationIndex >= 0);
|
||||
assert.ok(hintIndex >= 0);
|
||||
assert.equal(hintIndex, truncationIndex + 1);
|
||||
|
||||
assert.equal(logs.filter((l) => l.includes('View full output: ccw cli output EXEC-ERR')).length, 1);
|
||||
assert.equal(logs.filter((l) => l.includes('• View full output')).length, 0);
|
||||
assert.deepEqual(exitCodes, [1]);
|
||||
});
|
||||
|
||||
it('supports resume with conversation ID and latest (no prompt required)', async () => {
|
||||
stubHttpRequest();
|
||||
mock.method(console, 'log', () => {});
|
||||
@@ -181,6 +297,100 @@ describe('cli command module', async () => {
|
||||
assert.equal(executed, false);
|
||||
});
|
||||
|
||||
it('shows --file guidance first in help output (multi-line prompts)', async () => {
|
||||
const logs: string[] = [];
|
||||
mock.method(console, 'log', (...args: any[]) => {
|
||||
logs.push(args.map(String).join(' '));
|
||||
});
|
||||
mock.method(console, 'error', (...args: any[]) => {
|
||||
logs.push(args.map(String).join(' '));
|
||||
});
|
||||
|
||||
await cliModule.cliCommand('--help', [], {});
|
||||
|
||||
const usageFileIndex = logs.findIndex((l) => l.includes('ccw cli -f prompt.txt'));
|
||||
const usagePromptIndex = logs.findIndex((l) => l.includes('ccw cli -p "<prompt>"'));
|
||||
assert.ok(usageFileIndex >= 0);
|
||||
assert.ok(usagePromptIndex >= 0);
|
||||
assert.ok(usageFileIndex < usagePromptIndex);
|
||||
|
||||
const optionFileIndex = logs.findIndex((l) => l.includes('-f, --file <file>'));
|
||||
const optionPromptIndex = logs.findIndex((l) => l.includes('-p, --prompt <text>'));
|
||||
assert.ok(optionFileIndex >= 0);
|
||||
assert.ok(optionPromptIndex >= 0);
|
||||
assert.ok(optionFileIndex < optionPromptIndex);
|
||||
assert.ok(logs.some((l) => l.includes('Read prompt from file (recommended for multi-line prompts)')));
|
||||
|
||||
assert.ok(logs.some((l) => l.includes('Examples:')));
|
||||
assert.ok(logs.some((l) => l.includes('ccw cli -f my-prompt.txt --tool gemini')));
|
||||
assert.ok(logs.some((l) => l.includes("ccw cli -f <(cat <<'EOF'")));
|
||||
assert.ok(logs.some((l) => l.includes("@'")));
|
||||
assert.ok(logs.some((l) => l.includes('Out-File -Encoding utf8 prompt.tmp; ccw cli -f prompt.tmp --tool gemini')));
|
||||
assert.ok(logs.some((l) => l.includes('Tip: For complex prompts, use --file to avoid shell escaping issues')));
|
||||
});
|
||||
|
||||
it('prompts for confirmation before cleaning all storage (and cancels safely)', async () => {
|
||||
const projectRoot = join(TEST_CCW_HOME, 'projects', 'test-project-cancel');
|
||||
const markerDir = join(projectRoot, 'cli-history');
|
||||
mkdirSync(markerDir, { recursive: true });
|
||||
writeFileSync(join(markerDir, 'dummy.txt'), '1234');
|
||||
|
||||
const stats = storageManagerModule.getStorageStats();
|
||||
const expectedSize = storageManagerModule.formatBytes(stats.totalSize);
|
||||
|
||||
const promptCalls: any[] = [];
|
||||
mock.method(inquirer, 'prompt', async (questions: any) => {
|
||||
promptCalls.push(questions);
|
||||
return { proceed: false };
|
||||
});
|
||||
|
||||
const logs: string[] = [];
|
||||
mock.method(console, 'log', (...args: any[]) => {
|
||||
logs.push(args.map(String).join(' '));
|
||||
});
|
||||
mock.method(console, 'error', (...args: any[]) => {
|
||||
logs.push(args.map(String).join(' '));
|
||||
});
|
||||
|
||||
await cliModule.cliCommand('storage', ['clean'], { force: false });
|
||||
|
||||
assert.equal(promptCalls.length, 1);
|
||||
assert.equal(promptCalls[0][0].type, 'confirm');
|
||||
assert.equal(promptCalls[0][0].default, false);
|
||||
assert.ok(promptCalls[0][0].message.includes(`${stats.projectCount} projects`));
|
||||
assert.ok(promptCalls[0][0].message.includes(`(${expectedSize})`));
|
||||
|
||||
assert.ok(logs.some((l) => l.includes('Storage clean cancelled')));
|
||||
assert.equal(existsSync(projectRoot), true);
|
||||
rmSync(projectRoot, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('bypasses confirmation prompt when --force is set for storage clean', async () => {
|
||||
const projectRoot = join(TEST_CCW_HOME, 'projects', 'test-project-force');
|
||||
const markerDir = join(projectRoot, 'cli-history');
|
||||
mkdirSync(markerDir, { recursive: true });
|
||||
writeFileSync(join(markerDir, 'dummy.txt'), '1234');
|
||||
|
||||
mock.method(inquirer, 'prompt', async () => {
|
||||
throw new Error('inquirer.prompt should not be called when --force is set');
|
||||
});
|
||||
|
||||
await cliModule.cliCommand('storage', ['clean'], { force: true });
|
||||
assert.equal(existsSync(projectRoot), false);
|
||||
});
|
||||
|
||||
it('deletes all storage after interactive confirmation', async () => {
|
||||
const projectRoot = join(TEST_CCW_HOME, 'projects', 'test-project-confirm');
|
||||
const markerDir = join(projectRoot, 'cli-history');
|
||||
mkdirSync(markerDir, { recursive: true });
|
||||
writeFileSync(join(markerDir, 'dummy.txt'), '1234');
|
||||
|
||||
mock.method(inquirer, 'prompt', async () => ({ proceed: true }));
|
||||
|
||||
await cliModule.cliCommand('storage', ['clean'], { force: false });
|
||||
assert.equal(existsSync(projectRoot), false);
|
||||
});
|
||||
|
||||
it('prints history and retrieves conversation detail from SQLite store', async () => {
|
||||
stubHttpRequest();
|
||||
|
||||
|
||||
195
ccw/tests/cli-executor-kill.test.ts
Normal file
195
ccw/tests/cli-executor-kill.test.ts
Normal file
@@ -0,0 +1,195 @@
|
||||
/**
|
||||
* Regression tests for killCurrentCliProcess timeout handling (DSC-007).
|
||||
*
|
||||
* Focus:
|
||||
* - Avoid stale SIGKILL timers killing a subsequent child process
|
||||
* - Ensure SIGKILL is sent when SIGTERM does not terminate the process
|
||||
*/
|
||||
|
||||
import { after, before, describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { createRequire } from 'node:module';
|
||||
import { EventEmitter } from 'node:events';
|
||||
import { PassThrough } from 'node:stream';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const cliExecutorUrl = new URL('../dist/tools/cli-executor.js', import.meta.url).href;
|
||||
const historyStoreUrl = new URL('../dist/tools/cli-history-store.js', import.meta.url).href;
|
||||
|
||||
type FakeChild = EventEmitter & {
|
||||
pid?: number;
|
||||
killed: boolean;
|
||||
stdin: PassThrough;
|
||||
stdout: PassThrough;
|
||||
stderr: PassThrough;
|
||||
kill: (signal?: string) => boolean;
|
||||
killCalls: string[];
|
||||
close: (code?: number) => void;
|
||||
};
|
||||
|
||||
type ToolChildBehavior = {
|
||||
closeOnSigterm: boolean;
|
||||
};
|
||||
|
||||
describe('cli-executor: killCurrentCliProcess regression', async () => {
|
||||
const require = createRequire(import.meta.url);
|
||||
const childProcess = require('child_process');
|
||||
const originalSpawn = childProcess.spawn;
|
||||
const originalSetTimeout = globalThis.setTimeout;
|
||||
|
||||
const envSnapshot: Record<string, string | undefined> = {};
|
||||
let ccwHome = '';
|
||||
let projectDir = '';
|
||||
|
||||
const toolChildren: FakeChild[] = [];
|
||||
const plannedBehaviors: ToolChildBehavior[] = [];
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let cliExecutorModule: any;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let historyStoreModule: any;
|
||||
|
||||
function unrefFastSetTimeout<TArgs extends unknown[]>(
|
||||
fn: (...args: TArgs) => void,
|
||||
delay?: number,
|
||||
...args: TArgs
|
||||
): ReturnType<typeof setTimeout> {
|
||||
const t = originalSetTimeout(fn as (...args: unknown[]) => void, 25, ...args);
|
||||
(t as unknown as { unref?: () => void }).unref?.();
|
||||
return t;
|
||||
}
|
||||
|
||||
function createFakeChild(behavior: ToolChildBehavior, pid: number): FakeChild {
|
||||
const child = new EventEmitter() as FakeChild;
|
||||
child.pid = pid;
|
||||
child.killed = false;
|
||||
child.stdin = new PassThrough();
|
||||
child.stdout = new PassThrough();
|
||||
child.stderr = new PassThrough();
|
||||
child.killCalls = [];
|
||||
|
||||
let closed = false;
|
||||
child.close = (code: number = 0) => {
|
||||
if (closed) return;
|
||||
closed = true;
|
||||
child.stdout.end();
|
||||
child.stderr.end();
|
||||
child.emit('close', code);
|
||||
};
|
||||
|
||||
child.kill = (signal?: string) => {
|
||||
const sig = signal || 'SIGTERM';
|
||||
child.killCalls.push(sig);
|
||||
|
||||
if (sig === 'SIGTERM') {
|
||||
if (behavior.closeOnSigterm) {
|
||||
child.killed = true;
|
||||
queueMicrotask(() => child.close(0));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
if (sig === 'SIGKILL') {
|
||||
child.killed = true;
|
||||
queueMicrotask(() => child.close(0));
|
||||
return true;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
return child;
|
||||
}
|
||||
|
||||
before(async () => {
|
||||
envSnapshot.CCW_DATA_DIR = process.env.CCW_DATA_DIR;
|
||||
|
||||
ccwHome = mkdtempSync(join(tmpdir(), 'ccw-cli-executor-kill-home-'));
|
||||
projectDir = mkdtempSync(join(tmpdir(), 'ccw-cli-executor-kill-project-'));
|
||||
process.env.CCW_DATA_DIR = ccwHome;
|
||||
|
||||
globalThis.setTimeout = unrefFastSetTimeout as unknown as typeof setTimeout;
|
||||
|
||||
childProcess.spawn = (command: unknown, args: unknown[], options: Record<string, unknown>) => {
|
||||
const cmd = String(command);
|
||||
const argv = Array.isArray(args) ? args.map((a) => String(a)) : [];
|
||||
|
||||
// Tool lookup helpers.
|
||||
if (cmd === 'where' || cmd === 'which') {
|
||||
const child = createFakeChild({ closeOnSigterm: true }, 4000);
|
||||
queueMicrotask(() => {
|
||||
child.stdout.write(`C:\\\\fake\\\\${argv[0] || 'tool'}.cmd\r\n`);
|
||||
child.close(0);
|
||||
});
|
||||
return child;
|
||||
}
|
||||
|
||||
const behavior = plannedBehaviors.shift() ?? { closeOnSigterm: true };
|
||||
const child = createFakeChild(behavior, 5000 + toolChildren.length);
|
||||
toolChildren.push(child);
|
||||
|
||||
// Keep the process running until explicitly closed or killed.
|
||||
return child;
|
||||
};
|
||||
|
||||
cliExecutorModule = await import(cliExecutorUrl);
|
||||
historyStoreModule = await import(historyStoreUrl);
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
childProcess.spawn = originalSpawn;
|
||||
globalThis.setTimeout = originalSetTimeout;
|
||||
|
||||
try {
|
||||
historyStoreModule?.closeAllStores?.();
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
|
||||
if (projectDir) rmSync(projectDir, { recursive: true, force: true });
|
||||
if (ccwHome) rmSync(ccwHome, { recursive: true, force: true });
|
||||
|
||||
process.env.CCW_DATA_DIR = envSnapshot.CCW_DATA_DIR;
|
||||
});
|
||||
|
||||
it('does not kill a subsequent child via a stale SIGKILL timeout', async () => {
|
||||
plannedBehaviors.push({ closeOnSigterm: true });
|
||||
plannedBehaviors.push({ closeOnSigterm: false });
|
||||
|
||||
const run1 = cliExecutorModule.handler({ tool: 'codex', prompt: 'test', cd: projectDir });
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
|
||||
assert.equal(cliExecutorModule.killCurrentCliProcess(), true);
|
||||
await run1;
|
||||
|
||||
const run2 = cliExecutorModule.handler({ tool: 'codex', prompt: 'test-2', cd: projectDir });
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
|
||||
// Wait long enough for the (patched) kill timeout to fire if not cleared.
|
||||
await new Promise((resolve) => originalSetTimeout(resolve, 60));
|
||||
|
||||
assert.equal(toolChildren.length >= 2, true);
|
||||
assert.deepEqual(toolChildren[1].killCalls, []);
|
||||
|
||||
toolChildren[1].close(0);
|
||||
await run2;
|
||||
});
|
||||
|
||||
it('sends SIGKILL when SIGTERM does not terminate the process', async () => {
|
||||
plannedBehaviors.push({ closeOnSigterm: false });
|
||||
|
||||
const run = cliExecutorModule.handler({ tool: 'codex', prompt: 'timeout-test', cd: projectDir });
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
|
||||
assert.equal(cliExecutorModule.killCurrentCliProcess(), true);
|
||||
// Keep the event loop alive long enough for the (unref'd) timeout to fire.
|
||||
await new Promise((resolve) => originalSetTimeout(resolve, 60));
|
||||
await run;
|
||||
|
||||
assert.equal(toolChildren.length >= 1, true);
|
||||
assert.ok(toolChildren[toolChildren.length - 1].killCalls.includes('SIGTERM'));
|
||||
assert.ok(toolChildren[toolChildren.length - 1].killCalls.includes('SIGKILL'));
|
||||
});
|
||||
});
|
||||
173
ccw/tests/cli-executor-merge-validation.test.ts
Normal file
173
ccw/tests/cli-executor-merge-validation.test.ts
Normal file
@@ -0,0 +1,173 @@
|
||||
/**
|
||||
* Regression tests for conversation merge validation (DSC-008).
|
||||
*
|
||||
* Focus:
|
||||
* - Merge with all invalid IDs returns a descriptive error including attempted IDs
|
||||
* - Merge proceeds when at least one source conversation is valid
|
||||
*/
|
||||
|
||||
import { after, before, describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { createRequire } from 'node:module';
|
||||
import { EventEmitter } from 'node:events';
|
||||
import { PassThrough } from 'node:stream';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const cliExecutorUrl = new URL('../dist/tools/cli-executor.js', import.meta.url).href;
|
||||
const historyStoreUrl = new URL('../dist/tools/cli-history-store.js', import.meta.url).href;
|
||||
|
||||
type FakeChild = EventEmitter & {
|
||||
pid?: number;
|
||||
killed: boolean;
|
||||
stdin: PassThrough;
|
||||
stdout: PassThrough;
|
||||
stderr: PassThrough;
|
||||
kill: (signal?: string) => boolean;
|
||||
close: (code?: number) => void;
|
||||
};
|
||||
|
||||
function createFakeChild(pid: number): FakeChild {
|
||||
const child = new EventEmitter() as FakeChild;
|
||||
child.pid = pid;
|
||||
child.killed = false;
|
||||
child.stdin = new PassThrough();
|
||||
child.stdout = new PassThrough();
|
||||
child.stderr = new PassThrough();
|
||||
|
||||
let closed = false;
|
||||
child.close = (code: number = 0) => {
|
||||
if (closed) return;
|
||||
closed = true;
|
||||
child.stdout.end();
|
||||
child.stderr.end();
|
||||
child.emit('close', code);
|
||||
};
|
||||
|
||||
child.kill = (signal?: string) => {
|
||||
child.killed = true;
|
||||
queueMicrotask(() => child.close(0));
|
||||
return true;
|
||||
};
|
||||
|
||||
return child;
|
||||
}
|
||||
|
||||
describe('cli-executor: merge validation regression', async () => {
|
||||
const require = createRequire(import.meta.url);
|
||||
const childProcess = require('child_process');
|
||||
const originalSpawn = childProcess.spawn;
|
||||
|
||||
const envSnapshot: Record<string, string | undefined> = {};
|
||||
let ccwHome = '';
|
||||
let projectDir = '';
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let cliExecutorModule: any;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let historyStoreModule: any;
|
||||
|
||||
before(async () => {
|
||||
envSnapshot.CCW_DATA_DIR = process.env.CCW_DATA_DIR;
|
||||
|
||||
ccwHome = mkdtempSync(join(tmpdir(), 'ccw-cli-executor-merge-home-'));
|
||||
projectDir = mkdtempSync(join(tmpdir(), 'ccw-cli-executor-merge-project-'));
|
||||
process.env.CCW_DATA_DIR = ccwHome;
|
||||
|
||||
childProcess.spawn = (command: unknown, args: unknown[]) => {
|
||||
const cmd = String(command);
|
||||
const argv = Array.isArray(args) ? args.map((a) => String(a)) : [];
|
||||
|
||||
// Tool lookup helpers.
|
||||
if (cmd === 'where' || cmd === 'which') {
|
||||
const child = createFakeChild(4000);
|
||||
queueMicrotask(() => {
|
||||
child.stdout.write(`C:\\\\fake\\\\${argv[0] || 'tool'}.cmd\r\n`);
|
||||
child.close(0);
|
||||
});
|
||||
return child;
|
||||
}
|
||||
|
||||
const child = createFakeChild(5000);
|
||||
queueMicrotask(() => {
|
||||
child.stdout.write('OK\n');
|
||||
child.close(0);
|
||||
});
|
||||
return child;
|
||||
};
|
||||
|
||||
historyStoreModule = await import(historyStoreUrl);
|
||||
cliExecutorModule = await import(cliExecutorUrl);
|
||||
});
|
||||
|
||||
after(() => {
|
||||
childProcess.spawn = originalSpawn;
|
||||
|
||||
try {
|
||||
historyStoreModule?.closeAllStores?.();
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
|
||||
if (projectDir) rmSync(projectDir, { recursive: true, force: true });
|
||||
if (ccwHome) rmSync(ccwHome, { recursive: true, force: true });
|
||||
process.env.CCW_DATA_DIR = envSnapshot.CCW_DATA_DIR;
|
||||
});
|
||||
|
||||
it('throws a descriptive error when all merge IDs are invalid', async () => {
|
||||
await assert.rejects(
|
||||
() => cliExecutorModule.cliExecutorTool.execute({
|
||||
tool: 'codex',
|
||||
prompt: 'test',
|
||||
cd: projectDir,
|
||||
resume: 'MISSING-1, MISSING-2'
|
||||
}),
|
||||
(err: unknown) => {
|
||||
assert.ok(err instanceof Error);
|
||||
assert.ok(err.message.includes('No valid conversations found for merge'));
|
||||
assert.ok(err.message.includes('MISSING-1'));
|
||||
assert.ok(err.message.includes('MISSING-2'));
|
||||
return true;
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
it('merges when at least one source conversation is valid', async () => {
|
||||
const store = historyStoreModule.getHistoryStore(projectDir);
|
||||
store.saveConversation({
|
||||
id: 'CONV-MERGE-VALID-1',
|
||||
created_at: new Date('2025-01-01T00:00:00.000Z').toISOString(),
|
||||
updated_at: new Date('2025-01-01T00:00:01.000Z').toISOString(),
|
||||
tool: 'codex',
|
||||
model: 'default',
|
||||
mode: 'analysis',
|
||||
category: 'user',
|
||||
total_duration_ms: 1,
|
||||
turn_count: 1,
|
||||
latest_status: 'success',
|
||||
turns: [
|
||||
{
|
||||
turn: 1,
|
||||
timestamp: new Date('2025-01-01T00:00:00.000Z').toISOString(),
|
||||
prompt: 'Previous prompt',
|
||||
duration_ms: 1,
|
||||
status: 'success',
|
||||
exit_code: 0,
|
||||
output: { stdout: 'Previous output', stderr: '', truncated: false, cached: false }
|
||||
}
|
||||
]
|
||||
});
|
||||
|
||||
const result = await cliExecutorModule.cliExecutorTool.execute({
|
||||
tool: 'codex',
|
||||
prompt: 'Next prompt',
|
||||
cd: projectDir,
|
||||
resume: 'CONV-MERGE-VALID-1, MISSING-99'
|
||||
});
|
||||
|
||||
assert.equal(result.success, true);
|
||||
assert.ok(result.execution?.id);
|
||||
});
|
||||
});
|
||||
|
||||
31
ccw/tests/cors.test.ts
Normal file
31
ccw/tests/cors.test.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
/**
|
||||
* Unit tests for CORS origin validation (ccw/dist/core/cors.js)
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
const corsUrl = new URL('../dist/core/cors.js', import.meta.url).href;
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let corsMod: any;
|
||||
|
||||
describe('CORS origin validation', async () => {
|
||||
corsMod = await import(corsUrl);
|
||||
|
||||
it('allows localhost origins on the server port', () => {
|
||||
assert.equal(corsMod.validateCorsOrigin('http://localhost:3456', 3456), true);
|
||||
assert.equal(corsMod.validateCorsOrigin('http://127.0.0.1:3456', 3456), true);
|
||||
});
|
||||
|
||||
it('rejects external origins', () => {
|
||||
assert.equal(corsMod.validateCorsOrigin('http://evil.com', 3456), false);
|
||||
assert.equal(corsMod.validateCorsOrigin('http://localhost:3457', 3456), false);
|
||||
});
|
||||
|
||||
it('defaults missing or rejected Origin to localhost', () => {
|
||||
assert.equal(corsMod.getCorsOrigin(undefined, 3456), 'http://localhost:3456');
|
||||
assert.equal(corsMod.getCorsOrigin('http://evil.com', 3456), 'http://localhost:3456');
|
||||
});
|
||||
});
|
||||
|
||||
64
ccw/tests/csrf-manager.test.ts
Normal file
64
ccw/tests/csrf-manager.test.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
/**
|
||||
* Unit tests for CsrfTokenManager (ccw/dist/core/auth/csrf-manager.js).
|
||||
*
|
||||
* Notes:
|
||||
* - Targets the runtime implementation shipped in `ccw/dist`.
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
const csrfManagerUrl = new URL('../dist/core/auth/csrf-manager.js', import.meta.url).href;
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
describe('CsrfTokenManager', async () => {
|
||||
mod = await import(csrfManagerUrl);
|
||||
|
||||
it('generateToken produces a 64-character hex token', () => {
|
||||
const manager = new mod.CsrfTokenManager({ cleanupIntervalMs: 0 });
|
||||
const token = manager.generateToken('session-1');
|
||||
|
||||
assert.match(token, /^[a-f0-9]{64}$/);
|
||||
manager.dispose();
|
||||
});
|
||||
|
||||
it('validateToken accepts correct session token once', () => {
|
||||
const manager = new mod.CsrfTokenManager({ cleanupIntervalMs: 0 });
|
||||
const token = manager.generateToken('session-1');
|
||||
|
||||
assert.equal(manager.validateToken(token, 'session-1'), true);
|
||||
assert.equal(manager.validateToken(token, 'session-1'), false);
|
||||
manager.dispose();
|
||||
});
|
||||
|
||||
it('validateToken rejects expired tokens', () => {
|
||||
const manager = new mod.CsrfTokenManager({ tokenTtlMs: -1000, cleanupIntervalMs: 0 });
|
||||
const token = manager.generateToken('session-1');
|
||||
|
||||
assert.equal(manager.validateToken(token, 'session-1'), false);
|
||||
assert.equal(manager.getActiveTokenCount(), 0);
|
||||
manager.dispose();
|
||||
});
|
||||
|
||||
it('cleanupExpiredTokens removes expired entries', () => {
|
||||
const manager = new mod.CsrfTokenManager({ tokenTtlMs: 10, cleanupIntervalMs: 0 });
|
||||
manager.generateToken('session-1');
|
||||
|
||||
const removed = manager.cleanupExpiredTokens(Date.now() + 100);
|
||||
assert.equal(removed, 1);
|
||||
assert.equal(manager.getActiveTokenCount(), 0);
|
||||
manager.dispose();
|
||||
});
|
||||
|
||||
it('session association prevents cross-session token reuse', () => {
|
||||
const manager = new mod.CsrfTokenManager({ cleanupIntervalMs: 0 });
|
||||
const token = manager.generateToken('session-1');
|
||||
|
||||
assert.equal(manager.validateToken(token, 'session-2'), false);
|
||||
assert.equal(manager.validateToken(token, 'session-1'), true);
|
||||
manager.dispose();
|
||||
});
|
||||
});
|
||||
|
||||
153
ccw/tests/csrf-middleware.test.ts
Normal file
153
ccw/tests/csrf-middleware.test.ts
Normal file
@@ -0,0 +1,153 @@
|
||||
/**
|
||||
* Unit tests for CSRF middleware (ccw/dist/core/auth/csrf-middleware.js)
|
||||
*/
|
||||
|
||||
import { afterEach, describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { PassThrough } from 'node:stream';
|
||||
|
||||
type MockResponse = {
|
||||
status: number | null;
|
||||
headers: Record<string, unknown>;
|
||||
body: string;
|
||||
writeHead: (status: number, headers?: Record<string, string>) => void;
|
||||
setHeader: (name: string, value: unknown) => void;
|
||||
getHeader: (name: string) => unknown;
|
||||
end: (body?: string) => void;
|
||||
};
|
||||
|
||||
function createMockRes(): MockResponse {
|
||||
const headers: Record<string, unknown> = {};
|
||||
const response: MockResponse = {
|
||||
status: null,
|
||||
headers,
|
||||
body: '',
|
||||
writeHead: (status: number, nextHeaders?: Record<string, string>) => {
|
||||
response.status = status;
|
||||
if (nextHeaders) {
|
||||
for (const [k, v] of Object.entries(nextHeaders)) {
|
||||
headers[k.toLowerCase()] = v;
|
||||
}
|
||||
}
|
||||
},
|
||||
setHeader: (name: string, value: unknown) => {
|
||||
headers[name.toLowerCase()] = value;
|
||||
},
|
||||
getHeader: (name: string) => headers[name.toLowerCase()],
|
||||
end: (body?: string) => {
|
||||
response.body = body ? String(body) : '';
|
||||
},
|
||||
};
|
||||
return response;
|
||||
}
|
||||
|
||||
const middlewareUrl = new URL('../dist/core/auth/csrf-middleware.js', import.meta.url);
|
||||
|
||||
const managerUrl = new URL('../dist/core/auth/csrf-manager.js', import.meta.url);
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let middleware: any;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let csrfManager: any;
|
||||
|
||||
const ORIGINAL_ENV = { ...process.env };
|
||||
|
||||
describe('csrf middleware', async () => {
|
||||
middleware = await import(middlewareUrl.href);
|
||||
csrfManager = await import(managerUrl.href);
|
||||
|
||||
afterEach(() => {
|
||||
csrfManager.resetCsrfTokenManager();
|
||||
process.env = { ...ORIGINAL_ENV };
|
||||
});
|
||||
|
||||
it('allows non-state-changing requests without tokens', async () => {
|
||||
const req: any = { method: 'GET', headers: {} };
|
||||
const res = createMockRes();
|
||||
|
||||
const ok = await middleware.csrfValidation({ pathname: '/api/health', req, res });
|
||||
assert.equal(ok, true);
|
||||
assert.equal(res.status, null);
|
||||
});
|
||||
|
||||
it('rejects state-changing requests when tokens are missing', async () => {
|
||||
const req = new PassThrough() as any;
|
||||
req.method = 'POST';
|
||||
req.headers = {};
|
||||
const res = createMockRes();
|
||||
|
||||
const promise = middleware.csrfValidation({ pathname: '/api/remove-recent-path', req, res });
|
||||
queueMicrotask(() => {
|
||||
req.end();
|
||||
});
|
||||
const ok = await promise;
|
||||
assert.equal(ok, false);
|
||||
assert.equal(res.status, 403);
|
||||
assert.ok(res.body.includes('CSRF validation failed'));
|
||||
});
|
||||
|
||||
it('accepts valid CSRF token from cookies and rotates token', async () => {
|
||||
const sessionId = 'session-1';
|
||||
const manager = csrfManager.getCsrfTokenManager({ cleanupIntervalMs: 0 });
|
||||
const token = manager.generateToken(sessionId);
|
||||
|
||||
const req: any = { method: 'POST', headers: { cookie: `ccw_session_id=${sessionId}; XSRF-TOKEN=${token}` } };
|
||||
const res = createMockRes();
|
||||
|
||||
const ok = await middleware.csrfValidation({ pathname: '/api/remove-recent-path', req, res });
|
||||
assert.equal(ok, true);
|
||||
|
||||
const rotated = res.headers['x-csrf-token'];
|
||||
assert.ok(typeof rotated === 'string');
|
||||
assert.notEqual(rotated, token);
|
||||
assert.match(rotated, /^[a-f0-9]{64}$/);
|
||||
|
||||
const setCookie = res.headers['set-cookie'];
|
||||
const cookieString = Array.isArray(setCookie) ? setCookie.join('\n') : String(setCookie ?? '');
|
||||
assert.ok(cookieString.includes('XSRF-TOKEN='));
|
||||
assert.ok(cookieString.includes(String(rotated)));
|
||||
});
|
||||
|
||||
it('rejects token reuse', async () => {
|
||||
const sessionId = 'session-1';
|
||||
const manager = csrfManager.getCsrfTokenManager({ cleanupIntervalMs: 0 });
|
||||
const token = manager.generateToken(sessionId);
|
||||
|
||||
const req1: any = { method: 'POST', headers: { cookie: `ccw_session_id=${sessionId}; XSRF-TOKEN=${token}` } };
|
||||
const res1 = createMockRes();
|
||||
assert.equal(await middleware.csrfValidation({ pathname: '/api/remove-recent-path', req: req1, res: res1 }), true);
|
||||
|
||||
const req2: any = { method: 'POST', headers: { cookie: `ccw_session_id=${sessionId}; XSRF-TOKEN=${token}` } };
|
||||
const res2 = createMockRes();
|
||||
assert.equal(await middleware.csrfValidation({ pathname: '/api/remove-recent-path', req: req2, res: res2 }), false);
|
||||
assert.equal(res2.status, 403);
|
||||
});
|
||||
|
||||
it('accepts valid CSRF token from JSON body when cookies are absent', async () => {
|
||||
const sessionId = 'session-1';
|
||||
const manager = csrfManager.getCsrfTokenManager({ cleanupIntervalMs: 0 });
|
||||
const token = manager.generateToken(sessionId);
|
||||
|
||||
const req = new PassThrough() as any;
|
||||
req.method = 'POST';
|
||||
req.headers = { cookie: `ccw_session_id=${sessionId}` };
|
||||
|
||||
const res = createMockRes();
|
||||
const promise = middleware.csrfValidation({ pathname: '/api/remove-recent-path', req, res });
|
||||
queueMicrotask(() => {
|
||||
req.end(JSON.stringify({ csrfToken: token }));
|
||||
});
|
||||
|
||||
const ok = await promise;
|
||||
assert.equal(ok, true);
|
||||
});
|
||||
|
||||
it('skips CSRF validation when CCW_DISABLE_CSRF is enabled', async () => {
|
||||
process.env.CCW_DISABLE_CSRF = 'true';
|
||||
const req: any = { method: 'POST', headers: {} };
|
||||
const res = createMockRes();
|
||||
|
||||
const ok = await middleware.csrfValidation({ pathname: '/api/remove-recent-path', req, res });
|
||||
assert.equal(ok, true);
|
||||
});
|
||||
});
|
||||
167
ccw/tests/files-routes.test.ts
Normal file
167
ccw/tests/files-routes.test.ts
Normal file
@@ -0,0 +1,167 @@
|
||||
/**
|
||||
* Integration tests for files routes path validation.
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Focuses on access control for user-provided file paths.
|
||||
*/
|
||||
|
||||
import { after, before, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const PROJECT_ROOT = mkdtempSync(join(tmpdir(), 'ccw-files-routes-project-'));
|
||||
const OUTSIDE_ROOT = mkdtempSync(join(tmpdir(), 'ccw-files-routes-outside-'));
|
||||
|
||||
const filesRoutesUrl = new URL('../dist/core/routes/files-routes.js', import.meta.url);
|
||||
filesRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
type JsonResponse = { status: number; json: any; text: string };
|
||||
|
||||
async function requestJson(baseUrl: string, method: string, path: string, body?: unknown): Promise<JsonResponse> {
|
||||
const url = new URL(path, baseUrl);
|
||||
const payload = body === undefined ? null : Buffer.from(JSON.stringify(body), 'utf8');
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = http.request(
|
||||
url,
|
||||
{
|
||||
method,
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
...(payload ? { 'Content-Type': 'application/json', 'Content-Length': String(payload.length) } : {}),
|
||||
},
|
||||
},
|
||||
(res) => {
|
||||
let responseBody = '';
|
||||
res.on('data', (chunk) => {
|
||||
responseBody += chunk.toString();
|
||||
});
|
||||
res.on('end', () => {
|
||||
let json: any = null;
|
||||
try {
|
||||
json = responseBody ? JSON.parse(responseBody) : null;
|
||||
} catch {
|
||||
json = null;
|
||||
}
|
||||
resolve({ status: res.statusCode || 0, json, text: responseBody });
|
||||
});
|
||||
},
|
||||
);
|
||||
req.on('error', reject);
|
||||
if (payload) req.write(payload);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: unknown) => Promise<any>): void {
|
||||
let body = '';
|
||||
req.on('data', (chunk) => {
|
||||
body += chunk.toString();
|
||||
});
|
||||
req.on('end', async () => {
|
||||
try {
|
||||
const parsed = body ? JSON.parse(body) : {};
|
||||
const result = await handler(parsed);
|
||||
|
||||
if (result?.error) {
|
||||
res.writeHead(result.status || 500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: result.error }));
|
||||
} else {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async function createServer(initialPath: string): Promise<{ server: http.Server; baseUrl: string }> {
|
||||
const server = http.createServer(async (req, res) => {
|
||||
const url = new URL(req.url || '/', 'http://localhost');
|
||||
const pathname = url.pathname;
|
||||
|
||||
const ctx = {
|
||||
pathname,
|
||||
url,
|
||||
req,
|
||||
res,
|
||||
initialPath,
|
||||
handlePostRequest,
|
||||
broadcastToClients() {},
|
||||
};
|
||||
|
||||
try {
|
||||
const handled = await mod.handleFilesRoutes(ctx);
|
||||
if (!handled) {
|
||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Not Found' }));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
|
||||
await new Promise<void>((resolve) => server.listen(0, () => resolve()));
|
||||
const addr = server.address();
|
||||
const port = typeof addr === 'object' && addr ? addr.port : 0;
|
||||
return { server, baseUrl: `http://127.0.0.1:${port}` };
|
||||
}
|
||||
|
||||
describe('files routes path validation', async () => {
|
||||
before(async () => {
|
||||
mock.method(console, 'log', () => {});
|
||||
mock.method(console, 'error', () => {});
|
||||
mod = await import(filesRoutesUrl.href);
|
||||
});
|
||||
|
||||
after(() => {
|
||||
mock.restoreAll();
|
||||
rmSync(PROJECT_ROOT, { recursive: true, force: true });
|
||||
rmSync(OUTSIDE_ROOT, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('GET /api/files rejects paths outside initialPath', async () => {
|
||||
const { server, baseUrl } = await createServer(PROJECT_ROOT);
|
||||
try {
|
||||
const res = await requestJson(baseUrl, 'GET', `/api/files?path=${encodeURIComponent(OUTSIDE_ROOT)}`);
|
||||
assert.equal(res.status, 403);
|
||||
assert.equal(res.json.error, 'Access denied');
|
||||
assert.equal(Array.isArray(res.json.files), true);
|
||||
} finally {
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
}
|
||||
});
|
||||
|
||||
it('GET /api/file-content rejects paths outside initialPath', async () => {
|
||||
const { server, baseUrl } = await createServer(PROJECT_ROOT);
|
||||
try {
|
||||
const res = await requestJson(baseUrl, 'GET', `/api/file-content?path=${encodeURIComponent(join(OUTSIDE_ROOT, 'secret.txt'))}`);
|
||||
assert.equal(res.status, 403);
|
||||
assert.equal(res.json.error, 'Access denied');
|
||||
} finally {
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
}
|
||||
});
|
||||
|
||||
it('POST /api/update-claude-md rejects paths outside initialPath', async () => {
|
||||
const { server, baseUrl } = await createServer(PROJECT_ROOT);
|
||||
try {
|
||||
const res = await requestJson(baseUrl, 'POST', '/api/update-claude-md', { path: OUTSIDE_ROOT, tool: 'gemini', strategy: 'single-layer' });
|
||||
assert.equal(res.status, 403);
|
||||
assert.equal(res.json.error, 'Access denied');
|
||||
} finally {
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
145
ccw/tests/graph-routes.test.ts
Normal file
145
ccw/tests/graph-routes.test.ts
Normal file
@@ -0,0 +1,145 @@
|
||||
/**
|
||||
* Integration tests for graph routes path validation.
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Focuses on path validation behavior (rejects paths outside initialPath).
|
||||
*/
|
||||
|
||||
import { after, before, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const PROJECT_ROOT = mkdtempSync(join(tmpdir(), 'ccw-graph-routes-project-'));
|
||||
const OUTSIDE_ROOT = mkdtempSync(join(tmpdir(), 'ccw-graph-routes-outside-'));
|
||||
|
||||
const graphRoutesUrl = new URL('../dist/core/routes/graph-routes.js', import.meta.url);
|
||||
graphRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
type JsonResponse = { status: number; json: any; text: string };
|
||||
|
||||
async function requestJson(baseUrl: string, method: string, path: string, body?: unknown): Promise<JsonResponse> {
|
||||
const url = new URL(path, baseUrl);
|
||||
const payload = body === undefined ? null : Buffer.from(JSON.stringify(body), 'utf8');
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = http.request(
|
||||
url,
|
||||
{
|
||||
method,
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
...(payload ? { 'Content-Type': 'application/json', 'Content-Length': String(payload.length) } : {}),
|
||||
},
|
||||
},
|
||||
(res) => {
|
||||
let responseBody = '';
|
||||
res.on('data', (chunk) => {
|
||||
responseBody += chunk.toString();
|
||||
});
|
||||
res.on('end', () => {
|
||||
let json: any = null;
|
||||
try {
|
||||
json = responseBody ? JSON.parse(responseBody) : null;
|
||||
} catch {
|
||||
json = null;
|
||||
}
|
||||
resolve({ status: res.statusCode || 0, json, text: responseBody });
|
||||
});
|
||||
},
|
||||
);
|
||||
req.on('error', reject);
|
||||
if (payload) req.write(payload);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: unknown) => Promise<any>): void {
|
||||
let body = '';
|
||||
req.on('data', (chunk) => {
|
||||
body += chunk.toString();
|
||||
});
|
||||
req.on('end', async () => {
|
||||
try {
|
||||
const parsed = body ? JSON.parse(body) : {};
|
||||
const result = await handler(parsed);
|
||||
|
||||
if (result?.error) {
|
||||
res.writeHead(result.status || 500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: result.error }));
|
||||
} else {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async function createServer(initialPath: string): Promise<{ server: http.Server; baseUrl: string }> {
|
||||
const server = http.createServer(async (req, res) => {
|
||||
const url = new URL(req.url || '/', 'http://localhost');
|
||||
const pathname = url.pathname;
|
||||
|
||||
const ctx = {
|
||||
pathname,
|
||||
url,
|
||||
req,
|
||||
res,
|
||||
initialPath,
|
||||
handlePostRequest,
|
||||
broadcastToClients() {},
|
||||
};
|
||||
|
||||
try {
|
||||
const handled = await mod.handleGraphRoutes(ctx);
|
||||
if (!handled) {
|
||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Not Found' }));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
|
||||
await new Promise<void>((resolve) => server.listen(0, () => resolve()));
|
||||
const addr = server.address();
|
||||
const port = typeof addr === 'object' && addr ? addr.port : 0;
|
||||
return { server, baseUrl: `http://127.0.0.1:${port}` };
|
||||
}
|
||||
|
||||
describe('graph routes path validation', async () => {
  before(async () => {
    // Silence route logging so test output stays readable.
    mock.method(console, 'log', () => {});
    mock.method(console, 'error', () => {});
    // Import the built module lazily so the console mocks are active first.
    mod = await import(graphRoutesUrl.href);
  });

  after(() => {
    mock.restoreAll();
    // PROJECT_ROOT / OUTSIDE_ROOT are temp dirs created at module load.
    rmSync(PROJECT_ROOT, { recursive: true, force: true });
    rmSync(OUTSIDE_ROOT, { recursive: true, force: true });
  });

  it('GET /api/graph/nodes rejects paths outside initialPath', async () => {
    const { server, baseUrl } = await createServer(PROJECT_ROOT);
    try {
      // Request a path outside the allowed root; expect a 403 with an
      // empty nodes array rather than any data leakage.
      const res = await requestJson(baseUrl, 'GET', `/api/graph/nodes?path=${encodeURIComponent(OUTSIDE_ROOT)}`);
      assert.equal(res.status, 403);
      assert.equal(res.json.error, 'Access denied');
      assert.equal(Array.isArray(res.json.nodes), true);
    } finally {
      // Always close the ephemeral server so the test process can exit.
      await new Promise<void>((resolve) => server.close(() => resolve()));
    }
  });
});
|
||||
|
||||
146
ccw/tests/integration/ccw-routes.test.ts
Normal file
146
ccw/tests/integration/ccw-routes.test.ts
Normal file
@@ -0,0 +1,146 @@
|
||||
/**
|
||||
* Integration tests for CCW routes (installations/tools).
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Exercises real HTTP request/response flow via a minimal test server.
|
||||
*/
|
||||
|
||||
import { after, before, describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
|
||||
const ccwRoutesUrl = new URL('../../dist/core/routes/ccw-routes.js', import.meta.url);
|
||||
ccwRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
type JsonResponse = { status: number; json: any; text: string };
|
||||
|
||||
async function requestJson(baseUrl: string, method: string, path: string): Promise<JsonResponse> {
|
||||
const url = new URL(path, baseUrl);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = http.request(
|
||||
url,
|
||||
{ method, headers: { Accept: 'application/json' } },
|
||||
(res) => {
|
||||
let body = '';
|
||||
res.on('data', (chunk) => {
|
||||
body += chunk.toString();
|
||||
});
|
||||
res.on('end', () => {
|
||||
let json: any = null;
|
||||
try {
|
||||
json = body ? JSON.parse(body) : null;
|
||||
} catch {
|
||||
json = null;
|
||||
}
|
||||
resolve({ status: res.statusCode || 0, json, text: body });
|
||||
});
|
||||
},
|
||||
);
|
||||
req.on('error', reject);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: unknown) => Promise<any>): void {
|
||||
let body = '';
|
||||
req.on('data', (chunk) => {
|
||||
body += chunk.toString();
|
||||
});
|
||||
req.on('end', async () => {
|
||||
try {
|
||||
const parsed = body ? JSON.parse(body) : {};
|
||||
const result = await handler(parsed);
|
||||
|
||||
if (result?.error) {
|
||||
res.writeHead(result.status || 500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: result.error }));
|
||||
} else {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
describe('ccw routes integration', async () => {
  let server: http.Server | null = null;
  let baseUrl = '';

  before(async () => {
    mod = await import(ccwRoutesUrl.href);

    // Minimal HTTP server delegating every request to the handler under test.
    server = http.createServer(async (req, res) => {
      const url = new URL(req.url || '/', 'http://localhost');
      const pathname = url.pathname;

      const ctx = {
        pathname,
        url,
        req,
        res,
        initialPath: process.cwd(),
        handlePostRequest,
        // The real server pushes SSE updates; tests don't need them.
        broadcastToClients() {},
      };

      try {
        const handled = await mod.handleCcwRoutes(ctx);
        if (!handled) {
          res.writeHead(404, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify({ error: 'Not Found' }));
        }
      } catch (err: any) {
        res.writeHead(500, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: err?.message || String(err) }));
      }
    });

    // Port 0 = any free port; read the bound port back from the address.
    await new Promise<void>((resolve) => {
      server!.listen(0, () => resolve());
    });

    const addr = server.address();
    const port = typeof addr === 'object' && addr ? addr.port : 0;
    baseUrl = `http://127.0.0.1:${port}`;
  });

  after(async () => {
    if (!server) return;
    await new Promise<void>((resolve) => server!.close(() => resolve()));
  });

  it('GET /api/ccw/installations returns installation manifests', async () => {
    const res = await requestJson(baseUrl, 'GET', '/api/ccw/installations');
    assert.equal(res.status, 200);
    assert.ok(res.json);
    assert.equal(Array.isArray(res.json.installations), true);
  });

  it('GET /api/ccw/tools returns available tools', async () => {
    const res = await requestJson(baseUrl, 'GET', '/api/ccw/tools');
    assert.equal(res.status, 200);
    assert.ok(res.json);
    assert.equal(Array.isArray(res.json.tools), true);
  });

  it('GET /api/ccw/upgrade returns 404 (POST-only endpoint)', async () => {
    // The upgrade route only accepts POST, so a GET must fall through to 404.
    const res = await requestJson(baseUrl, 'GET', '/api/ccw/upgrade');
    assert.equal(res.status, 404);
    assert.ok(res.json?.error);
  });

  it('returns 404 for unknown /api/ccw/* routes', async () => {
    const res = await requestJson(baseUrl, 'GET', '/api/ccw/nope');
    assert.equal(res.status, 404);
    assert.ok(res.json?.error);
  });
});
|
||||
|
||||
272
ccw/tests/integration/claude-routes.test.ts
Normal file
272
ccw/tests/integration/claude-routes.test.ts
Normal file
@@ -0,0 +1,272 @@
|
||||
/**
|
||||
* Integration tests for CLAUDE.md routes (scan + CRUD).
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Uses temporary HOME/USERPROFILE to isolate user-level files.
|
||||
* - Uses a temporary project root as initialPath for project/module operations.
|
||||
*/
|
||||
|
||||
import { after, before, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
import { existsSync, mkdirSync, mkdtempSync, readdirSync, readFileSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const claudeRoutesUrl = new URL('../../dist/core/routes/claude-routes.js', import.meta.url);
|
||||
claudeRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
// Snapshot of the home-directory environment variables taken at module load;
// restored in the suite's after() hook. A value of `undefined` records that
// the variable was unset before the tests ran.
const originalEnv = {
  HOME: process.env.HOME,
  USERPROFILE: process.env.USERPROFILE,
  HOMEDRIVE: process.env.HOMEDRIVE,
  HOMEPATH: process.env.HOMEPATH,
};
|
||||
|
||||
type JsonResponse = { status: number; json: any; text: string };
|
||||
|
||||
async function requestJson(
|
||||
baseUrl: string,
|
||||
method: string,
|
||||
path: string,
|
||||
body?: unknown,
|
||||
): Promise<JsonResponse> {
|
||||
const url = new URL(path, baseUrl);
|
||||
const payload = body === undefined ? null : Buffer.from(JSON.stringify(body), 'utf8');
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = http.request(
|
||||
url,
|
||||
{
|
||||
method,
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
...(payload
|
||||
? { 'Content-Type': 'application/json', 'Content-Length': String(payload.length) }
|
||||
: {}),
|
||||
},
|
||||
},
|
||||
(res) => {
|
||||
let responseBody = '';
|
||||
res.on('data', (chunk) => {
|
||||
responseBody += chunk.toString();
|
||||
});
|
||||
res.on('end', () => {
|
||||
let json: any = null;
|
||||
try {
|
||||
json = responseBody ? JSON.parse(responseBody) : null;
|
||||
} catch {
|
||||
json = null;
|
||||
}
|
||||
resolve({ status: res.statusCode || 0, json, text: responseBody });
|
||||
});
|
||||
},
|
||||
);
|
||||
req.on('error', reject);
|
||||
if (payload) req.write(payload);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: unknown) => Promise<any>): void {
|
||||
let body = '';
|
||||
req.on('data', (chunk) => {
|
||||
body += chunk.toString();
|
||||
});
|
||||
req.on('end', async () => {
|
||||
try {
|
||||
const parsed = body ? JSON.parse(body) : {};
|
||||
const result = await handler(parsed);
|
||||
|
||||
if (result?.error) {
|
||||
res.writeHead(result.status || 500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: result.error }));
|
||||
} else {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
describe('claude routes integration', async () => {
|
||||
let server: http.Server | null = null;
|
||||
let baseUrl = '';
|
||||
let homeDir = '';
|
||||
let projectRoot = '';
|
||||
|
||||
before(async () => {
|
||||
homeDir = mkdtempSync(join(tmpdir(), 'ccw-claude-home-'));
|
||||
projectRoot = mkdtempSync(join(tmpdir(), 'ccw-claude-project-'));
|
||||
|
||||
process.env.HOME = homeDir;
|
||||
process.env.USERPROFILE = homeDir;
|
||||
process.env.HOMEDRIVE = undefined;
|
||||
process.env.HOMEPATH = undefined;
|
||||
|
||||
mock.method(console, 'log', () => {});
|
||||
mock.method(console, 'error', () => {});
|
||||
|
||||
mod = await import(claudeRoutesUrl.href);
|
||||
|
||||
server = http.createServer(async (req, res) => {
|
||||
const url = new URL(req.url || '/', 'http://localhost');
|
||||
const pathname = url.pathname;
|
||||
|
||||
const ctx = {
|
||||
pathname,
|
||||
url,
|
||||
req,
|
||||
res,
|
||||
initialPath: projectRoot,
|
||||
handlePostRequest,
|
||||
broadcastToClients() {},
|
||||
};
|
||||
|
||||
try {
|
||||
const handled = await mod.handleClaudeRoutes(ctx);
|
||||
if (!handled) {
|
||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Not Found' }));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
|
||||
await new Promise<void>((resolve) => server!.listen(0, () => resolve()));
|
||||
const addr = server.address();
|
||||
const port = typeof addr === 'object' && addr ? addr.port : 0;
|
||||
baseUrl = `http://127.0.0.1:${port}`;
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
mock.restoreAll();
|
||||
process.env.HOME = originalEnv.HOME;
|
||||
process.env.USERPROFILE = originalEnv.USERPROFILE;
|
||||
process.env.HOMEDRIVE = originalEnv.HOMEDRIVE;
|
||||
process.env.HOMEPATH = originalEnv.HOMEPATH;
|
||||
|
||||
if (server) {
|
||||
await new Promise<void>((resolve) => server!.close(() => resolve()));
|
||||
server = null;
|
||||
}
|
||||
|
||||
if (projectRoot) {
|
||||
rmSync(projectRoot, { recursive: true, force: true });
|
||||
projectRoot = '';
|
||||
}
|
||||
|
||||
if (homeDir) {
|
||||
rmSync(homeDir, { recursive: true, force: true });
|
||||
homeDir = '';
|
||||
}
|
||||
});
|
||||
|
||||
it('POST /api/memory/claude/create creates a project-level CLAUDE.md', async () => {
|
||||
const res = await requestJson(baseUrl, 'POST', '/api/memory/claude/create', { level: 'project', template: 'minimal' });
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json?.success, true);
|
||||
assert.ok(typeof res.json.path === 'string' && res.json.path.endsWith('CLAUDE.md'));
|
||||
assert.equal(existsSync(res.json.path), true);
|
||||
});
|
||||
|
||||
it('GET /api/memory/claude/file parses frontmatter for project CLAUDE.md', async () => {
|
||||
const claudePath = join(projectRoot, '.claude', 'CLAUDE.md');
|
||||
mkdirSync(join(projectRoot, '.claude'), { recursive: true });
|
||||
writeFileSync(
|
||||
claudePath,
|
||||
['---', 'paths: [src, docs]', '---', '', '# Project Rules', '', 'ok'].join('\n'),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
const res = await requestJson(baseUrl, 'GET', `/api/memory/claude/file?path=${encodeURIComponent(claudePath)}`);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json.level, 'project');
|
||||
assert.deepEqual(res.json.frontmatter?.paths, ['src', 'docs']);
|
||||
assert.match(res.json.content, /# Project Rules/);
|
||||
assert.equal(String(res.json.content).includes('paths:'), false);
|
||||
});
|
||||
|
||||
it('POST /api/memory/claude/file saves updated content', async () => {
|
||||
const claudePath = join(projectRoot, '.claude', 'CLAUDE.md');
|
||||
mkdirSync(join(projectRoot, '.claude'), { recursive: true });
|
||||
writeFileSync(claudePath, 'before\n', 'utf8');
|
||||
|
||||
const res = await requestJson(baseUrl, 'POST', '/api/memory/claude/file', { path: claudePath, content: 'after\n' });
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json?.success, true);
|
||||
assert.equal(readFileSync(claudePath, 'utf8'), 'after\n');
|
||||
});
|
||||
|
||||
it('GET /api/memory/claude/scan separates user/project/module levels', async () => {
|
||||
const userClaudePath = join(homeDir, '.claude', 'CLAUDE.md');
|
||||
mkdirSync(join(homeDir, '.claude'), { recursive: true });
|
||||
writeFileSync(userClaudePath, '# User CLAUDE\n', 'utf8');
|
||||
|
||||
const projectClaudePath = join(projectRoot, '.claude', 'CLAUDE.md');
|
||||
mkdirSync(join(projectRoot, '.claude'), { recursive: true });
|
||||
writeFileSync(projectClaudePath, ['---', 'paths: [src]', '---', '', '# Project CLAUDE'].join('\n'), 'utf8');
|
||||
|
||||
const moduleDir = join(projectRoot, 'module-a');
|
||||
mkdirSync(moduleDir, { recursive: true });
|
||||
writeFileSync(join(moduleDir, 'CLAUDE.md'), '# Module CLAUDE\n', 'utf8');
|
||||
|
||||
const res = await requestJson(baseUrl, 'GET', `/api/memory/claude/scan?path=${encodeURIComponent(projectRoot)}`);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json.user?.main?.level, 'user');
|
||||
assert.ok(String(res.json.user.main.path).includes(homeDir));
|
||||
|
||||
assert.equal(res.json.project?.main?.level, 'project');
|
||||
assert.ok(String(res.json.project.main.path).includes(projectRoot));
|
||||
assert.deepEqual(res.json.project.main.frontmatter?.paths, ['src']);
|
||||
assert.equal(String(res.json.project.main.content).includes('paths:'), false);
|
||||
|
||||
assert.equal(Array.isArray(res.json.modules), true);
|
||||
assert.ok(res.json.modules.length >= 1);
|
||||
const moduleFile = res.json.modules.find((m: any) => String(m.path).includes('module-a'));
|
||||
assert.ok(moduleFile);
|
||||
assert.equal(moduleFile.level, 'module');
|
||||
assert.equal(moduleFile.parentDirectory, 'module-a');
|
||||
});
|
||||
|
||||
it('DELETE /api/memory/claude/file requires confirm=true', async () => {
|
||||
const moduleDir = join(projectRoot, 'module-del');
|
||||
const moduleFilePath = join(moduleDir, 'CLAUDE.md');
|
||||
mkdirSync(moduleDir, { recursive: true });
|
||||
writeFileSync(moduleFilePath, '# To delete\n', 'utf8');
|
||||
|
||||
const res = await requestJson(baseUrl, 'DELETE', `/api/memory/claude/file?path=${encodeURIComponent(moduleFilePath)}`);
|
||||
assert.equal(res.status, 400);
|
||||
assert.equal(res.json?.error, 'Confirmation required');
|
||||
assert.equal(existsSync(moduleFilePath), true);
|
||||
});
|
||||
|
||||
it('DELETE /api/memory/claude/file deletes the file and creates a backup', async () => {
|
||||
const moduleDir = join(projectRoot, 'module-del-ok');
|
||||
const moduleFilePath = join(moduleDir, 'CLAUDE.md');
|
||||
mkdirSync(moduleDir, { recursive: true });
|
||||
writeFileSync(moduleFilePath, '# Bye\n', 'utf8');
|
||||
|
||||
const res = await requestJson(
|
||||
baseUrl,
|
||||
'DELETE',
|
||||
`/api/memory/claude/file?path=${encodeURIComponent(moduleFilePath)}&confirm=true`,
|
||||
);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json?.success, true);
|
||||
assert.equal(existsSync(moduleFilePath), false);
|
||||
|
||||
const backups = readdirSync(moduleDir).filter((name) => name.startsWith('CLAUDE.md.deleted-'));
|
||||
assert.equal(backups.length, 1);
|
||||
});
|
||||
});
|
||||
|
||||
206
ccw/tests/integration/files-routes.test.ts
Normal file
206
ccw/tests/integration/files-routes.test.ts
Normal file
@@ -0,0 +1,206 @@
|
||||
/**
|
||||
* Integration tests for files routes (directory listing + file preview).
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Uses a temporary project directory as the allowed root (initialPath).
|
||||
*/
|
||||
|
||||
import { after, before, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const filesRoutesUrl = new URL('../../dist/core/routes/files-routes.js', import.meta.url);
|
||||
filesRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
type JsonResponse = { status: number; json: any; text: string };
|
||||
|
||||
async function requestJson(baseUrl: string, method: string, path: string): Promise<JsonResponse> {
|
||||
const url = new URL(path, baseUrl);
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = http.request(
|
||||
url,
|
||||
{ method, headers: { Accept: 'application/json' } },
|
||||
(res) => {
|
||||
let body = '';
|
||||
res.on('data', (chunk) => {
|
||||
body += chunk.toString();
|
||||
});
|
||||
res.on('end', () => {
|
||||
let json: any = null;
|
||||
try {
|
||||
json = body ? JSON.parse(body) : null;
|
||||
} catch {
|
||||
json = null;
|
||||
}
|
||||
resolve({ status: res.statusCode || 0, json, text: body });
|
||||
});
|
||||
},
|
||||
);
|
||||
req.on('error', reject);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: unknown) => Promise<any>): void {
|
||||
let body = '';
|
||||
req.on('data', (chunk) => {
|
||||
body += chunk.toString();
|
||||
});
|
||||
req.on('end', async () => {
|
||||
try {
|
||||
const parsed = body ? JSON.parse(body) : {};
|
||||
const result = await handler(parsed);
|
||||
|
||||
if (result?.error) {
|
||||
res.writeHead(result.status || 500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: result.error }));
|
||||
} else {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
describe('files routes integration', async () => {
  let server: http.Server | null = null;
  let baseUrl = '';
  let projectRoot = '';

  before(async () => {
    projectRoot = mkdtempSync(join(tmpdir(), 'ccw-files-routes-project-'));

    // Fixture layout: a normal subdir, the two always-allowed dot dirs,
    // an always-excluded dir, and a gitignored dir.
    mkdirSync(join(projectRoot, 'subdir'), { recursive: true });
    mkdirSync(join(projectRoot, '.claude'), { recursive: true });
    mkdirSync(join(projectRoot, '.workflow'), { recursive: true });
    mkdirSync(join(projectRoot, 'node_modules'), { recursive: true });
    mkdirSync(join(projectRoot, 'ignored-dir'), { recursive: true });

    writeFileSync(join(projectRoot, 'visible.txt'), 'ok\n', 'utf8');
    writeFileSync(join(projectRoot, 'ignored.txt'), 'nope\n', 'utf8');
    writeFileSync(join(projectRoot, '.secret'), 'hidden\n', 'utf8');
    writeFileSync(join(projectRoot, 'readme.md'), '# Hello\n', 'utf8');
    writeFileSync(join(projectRoot, '.gitignore'), ['ignored.txt', 'ignored-dir/'].join('\n') + '\n', 'utf8');

    mock.method(console, 'error', () => {});
    mod = await import(filesRoutesUrl.href);

    // Minimal HTTP server delegating every request to the handler under test.
    server = http.createServer(async (req, res) => {
      const url = new URL(req.url || '/', 'http://localhost');
      const pathname = url.pathname;

      const ctx = {
        pathname,
        url,
        req,
        res,
        initialPath: projectRoot,
        handlePostRequest,
      };

      try {
        const handled = await mod.handleFilesRoutes(ctx);
        if (!handled) {
          res.writeHead(404, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify({ error: 'Not Found' }));
        }
      } catch (err: any) {
        res.writeHead(500, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: err?.message || String(err) }));
      }
    });

    // Port 0 = any free port; read the bound port back from the address.
    await new Promise<void>((resolve) => {
      server!.listen(0, () => resolve());
    });

    const addr = server.address();
    const port = typeof addr === 'object' && addr ? addr.port : 0;
    baseUrl = `http://127.0.0.1:${port}`;
  });

  after(async () => {
    mock.restoreAll();
    if (server) {
      await new Promise<void>((resolve) => server!.close(() => resolve()));
      server = null;
    }
    if (projectRoot) {
      rmSync(projectRoot, { recursive: true, force: true });
      projectRoot = '';
    }
  });

  it('GET /api/files lists entries and respects gitignore/exclude rules', async () => {
    const res = await requestJson(baseUrl, 'GET', `/api/files?path=${encodeURIComponent(projectRoot)}`);
    assert.equal(res.status, 200);
    assert.ok(res.json);
    assert.equal(Array.isArray(res.json.files), true);

    const names = res.json.files.map((f: any) => f.name);
    assert.ok(names.includes('subdir'));
    assert.ok(names.includes('visible.txt'));
    assert.ok(names.includes('.claude'));
    assert.ok(names.includes('.workflow'));

    // Hidden dotfiles (except .claude/.workflow) are excluded.
    assert.equal(names.includes('.secret'), false);
    // Common excluded dirs are always removed.
    assert.equal(names.includes('node_modules'), false);
    // .gitignore patterns should be enforced.
    assert.equal(names.includes('ignored.txt'), false);
    assert.equal(names.includes('ignored-dir'), false);
    assert.equal(Array.isArray(res.json.gitignorePatterns), true);
    assert.ok(res.json.gitignorePatterns.includes('ignored.txt'));
  });

  it('GET /api/files returns 400 for non-existent path', async () => {
    const missing = join(projectRoot, 'missing-dir');
    const res = await requestJson(baseUrl, 'GET', `/api/files?path=${encodeURIComponent(missing)}`);
    assert.equal(res.status, 400);
    assert.equal(res.json?.error, 'Invalid path');
    assert.equal(Array.isArray(res.json?.files), true);
    assert.equal(res.json.files.length, 0);
  });

  it('GET /api/files blocks traversal outside initialPath', async () => {
    // `..` resolves outside the allowed root and must be rejected.
    const outside = join(projectRoot, '..');
    const res = await requestJson(baseUrl, 'GET', `/api/files?path=${encodeURIComponent(outside)}`);
    assert.equal(res.status, 403);
    assert.equal(res.json?.error, 'Access denied');
  });

  it('GET /api/file-content returns preview content for files', async () => {
    const target = join(projectRoot, 'readme.md');
    const res = await requestJson(baseUrl, 'GET', `/api/file-content?path=${encodeURIComponent(target)}`);
    assert.equal(res.status, 200);
    assert.ok(res.json);
    assert.equal(res.json.fileName, 'readme.md');
    assert.equal(res.json.language, 'markdown');
    assert.equal(res.json.isMarkdown, true);
    assert.ok(String(res.json.content).includes('# Hello'));
  });

  it('GET /api/file-content returns 400 when path is missing', async () => {
    const res = await requestJson(baseUrl, 'GET', '/api/file-content');
    assert.equal(res.status, 400);
    assert.ok(res.json?.error);
  });

  it('GET /api/file-content returns 404 when path is a directory', async () => {
    const res = await requestJson(baseUrl, 'GET', `/api/file-content?path=${encodeURIComponent(projectRoot)}`);
    assert.equal(res.status, 404);
    assert.equal(res.json?.error, 'Cannot read directory');
  });
});
|
||||
93
ccw/tests/integration/graph-routes.test.ts
Normal file
93
ccw/tests/integration/graph-routes.test.ts
Normal file
@@ -0,0 +1,93 @@
|
||||
/**
|
||||
* Integration tests for graph routes (CodexLens graph API helpers).
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Calls route handler directly (no HTTP server required).
|
||||
*/
|
||||
import { after, before, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const graphRoutesUrl = new URL('../../dist/core/routes/graph-routes.js', import.meta.url);
|
||||
graphRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
async function callGraph(
|
||||
projectRoot: string,
|
||||
path: string,
|
||||
): Promise<{ handled: boolean; status: number; json: any }> {
|
||||
const url = new URL(path, 'http://localhost');
|
||||
let status = 0;
|
||||
let body = '';
|
||||
|
||||
const res = {
|
||||
writeHead(code: number) {
|
||||
status = code;
|
||||
},
|
||||
end(chunk?: any) {
|
||||
body = chunk === undefined ? '' : String(chunk);
|
||||
},
|
||||
};
|
||||
|
||||
const handled = await mod.handleGraphRoutes({
|
||||
pathname: url.pathname,
|
||||
url,
|
||||
req: { method: 'GET' },
|
||||
res,
|
||||
initialPath: projectRoot,
|
||||
});
|
||||
|
||||
return { handled, status, json: body ? JSON.parse(body) : null };
|
||||
}
|
||||
|
||||
describe('graph routes integration', async () => {
  let projectRoot = '';

  before(async () => {
    // Empty temp dir: no CodexLens index exists, exercising fallback paths.
    projectRoot = mkdtempSync(join(tmpdir(), 'ccw-graph-project-'));
    mock.method(console, 'error', () => {});
    mod = await import(graphRoutesUrl.href);
  });

  after(() => {
    mock.restoreAll();
    if (projectRoot) {
      rmSync(projectRoot, { recursive: true, force: true });
      projectRoot = '';
    }
  });

  it('GET /api/graph/search-process returns placeholder pipeline data', async () => {
    const res = await callGraph(projectRoot, '/api/graph/search-process');
    assert.equal(res.handled, true);
    assert.equal(res.status, 200);
    assert.equal(Array.isArray(res.json.stages), true);
    assert.equal(res.json.stages.length, 5);
    assert.equal(typeof res.json.message, 'string');
  });

  it('GET /api/graph/files returns empty lists when no index exists', async () => {
    const res = await callGraph(projectRoot, `/api/graph/files?path=${encodeURIComponent(projectRoot)}`);
    assert.equal(res.handled, true);
    assert.equal(res.status, 200);
    assert.equal(Array.isArray(res.json.files), true);
    assert.equal(Array.isArray(res.json.modules), true);
    assert.equal(res.json.files.length, 0);
    assert.equal(res.json.modules.length, 0);
  });

  it('GET /api/graph/impact validates required symbol parameter', async () => {
    // Omitting `symbol` must yield a 400 with empty (but well-typed) arrays.
    const res = await callGraph(projectRoot, `/api/graph/impact?path=${encodeURIComponent(projectRoot)}`);
    assert.equal(res.handled, true);
    assert.equal(res.status, 400);
    assert.ok(String(res.json.error).includes('symbol'));
    assert.equal(Array.isArray(res.json.directDependents), true);
    assert.equal(Array.isArray(res.json.affectedFiles), true);
  });
});
|
||||
|
||||
174
ccw/tests/integration/help-routes.test.ts
Normal file
174
ccw/tests/integration/help-routes.test.ts
Normal file
@@ -0,0 +1,174 @@
|
||||
/**
|
||||
* Integration tests for help routes (command guide + CodexLens docs).
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Avoids spinning up a real HTTP server; calls route handler directly.
|
||||
* - Uses a temporary HOME/USERPROFILE to isolate ~/.claude/skills/command-guide/index data.
|
||||
*/
|
||||
|
||||
import { after, before, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const helpRoutesUrl = new URL('../../dist/core/routes/help-routes.js', import.meta.url);
|
||||
helpRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
// Snapshot of the home-directory environment variables taken at module load;
// restored in the suite's after() hook. A value of `undefined` records that
// the variable was unset before the tests ran.
// NOTE(review): restoring with `process.env.X = originalEnv.X` stores the
// string 'undefined' when the variable was unset — the after() hook should
// `delete` in that case; verify the restore logic.
const originalEnv = {
  HOME: process.env.HOME,
  USERPROFILE: process.env.USERPROFILE,
  HOMEDRIVE: process.env.HOMEDRIVE,
  HOMEPATH: process.env.HOMEPATH,
};
|
||||
|
||||
async function callRoute(path: string): Promise<{ handled: boolean; status: number; json: any; text: string }> {
|
||||
const url = new URL(path, 'http://localhost');
|
||||
let status = 0;
|
||||
let text = '';
|
||||
|
||||
const res = {
|
||||
writeHead(code: number) {
|
||||
status = code;
|
||||
},
|
||||
end(chunk?: any) {
|
||||
text = chunk === undefined ? '' : String(chunk);
|
||||
},
|
||||
};
|
||||
|
||||
const ctx = {
|
||||
pathname: url.pathname,
|
||||
url,
|
||||
req: { method: 'GET' },
|
||||
res,
|
||||
};
|
||||
|
||||
const handled = await mod.handleHelpRoutes(ctx);
|
||||
|
||||
let json: any = null;
|
||||
try {
|
||||
json = text ? JSON.parse(text) : null;
|
||||
} catch {
|
||||
json = null;
|
||||
}
|
||||
|
||||
return { handled, status, json, text };
|
||||
}
|
||||
|
||||
describe('help routes integration', async () => {
|
||||
let homeDir = '';
|
||||
|
||||
before(async () => {
|
||||
homeDir = mkdtempSync(join(tmpdir(), 'ccw-help-home-'));
|
||||
process.env.HOME = homeDir;
|
||||
process.env.USERPROFILE = homeDir;
|
||||
process.env.HOMEDRIVE = undefined;
|
||||
process.env.HOMEPATH = undefined;
|
||||
|
||||
mock.method(console, 'log', () => {});
|
||||
mock.method(console, 'warn', () => {});
|
||||
mock.method(console, 'error', () => {});
|
||||
|
||||
const indexDir = join(homeDir, '.claude', 'skills', 'command-guide', 'index');
|
||||
mkdirSync(indexDir, { recursive: true });
|
||||
|
||||
writeFileSync(
|
||||
join(indexDir, 'all-commands.json'),
|
||||
JSON.stringify(
|
||||
[
|
||||
{ name: 'Issue Next', command: 'ccw issue next', description: 'Fetch next item', category: 'issue', subcategory: 'queue' },
|
||||
{ name: 'Serve', command: 'ccw serve', description: 'Start dashboard server', category: 'core' },
|
||||
],
|
||||
null,
|
||||
2,
|
||||
),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
writeFileSync(
|
||||
join(indexDir, 'command-relationships.json'),
|
||||
JSON.stringify({ workflows: [{ name: 'Issue Queue', commands: ['ccw issue next', 'ccw issue done'] }] }, null, 2),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
writeFileSync(
|
||||
join(indexDir, 'by-category.json'),
|
||||
JSON.stringify({ issue: ['ccw issue next'], core: ['ccw serve'] }, null, 2),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
mod = await import(helpRoutesUrl.href);
|
||||
});
|
||||
|
||||
after(() => {
|
||||
mock.restoreAll();
|
||||
process.env.HOME = originalEnv.HOME;
|
||||
process.env.USERPROFILE = originalEnv.USERPROFILE;
|
||||
process.env.HOMEDRIVE = originalEnv.HOMEDRIVE;
|
||||
process.env.HOMEPATH = originalEnv.HOMEPATH;
|
||||
|
||||
const activeHandles: any[] = (process as any)._getActiveHandles?.() || [];
|
||||
for (const handle of activeHandles) {
|
||||
if (handle?.constructor?.name === 'FSWatcher' && typeof handle.close === 'function') {
|
||||
try {
|
||||
handle.close();
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (homeDir) {
|
||||
rmSync(homeDir, { recursive: true, force: true });
|
||||
homeDir = '';
|
||||
}
|
||||
});
|
||||
|
||||
it('GET /api/help/commands returns commands and grouped categories', async () => {
|
||||
const res = await callRoute('/api/help/commands');
|
||||
assert.equal(res.handled, true);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(Array.isArray(res.json.commands), true);
|
||||
assert.equal(res.json.total, 2);
|
||||
assert.equal(typeof res.json.grouped, 'object');
|
||||
assert.ok(res.json.grouped.issue);
|
||||
});
|
||||
|
||||
it('GET /api/help/commands?q filters commands by search query', async () => {
|
||||
const res = await callRoute('/api/help/commands?q=issue');
|
||||
assert.equal(res.handled, true);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json.total, 1);
|
||||
assert.equal(res.json.commands[0].command, 'ccw issue next');
|
||||
});
|
||||
|
||||
it('GET /api/help/workflows returns workflow relationships data', async () => {
|
||||
const res = await callRoute('/api/help/workflows');
|
||||
assert.equal(res.handled, true);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(Array.isArray(res.json.workflows), true);
|
||||
assert.equal(res.json.workflows[0].name, 'Issue Queue');
|
||||
});
|
||||
|
||||
it('GET /api/help/commands/by-category returns category index data', async () => {
|
||||
const res = await callRoute('/api/help/commands/by-category');
|
||||
assert.equal(res.handled, true);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(Array.isArray(res.json.issue), true);
|
||||
assert.equal(res.json.issue[0], 'ccw issue next');
|
||||
});
|
||||
|
||||
it('GET /api/help/codexlens returns CodexLens quick start content', async () => {
|
||||
const res = await callRoute('/api/help/codexlens');
|
||||
assert.equal(res.handled, true);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json.title, 'CodexLens Quick Start');
|
||||
assert.equal(Array.isArray(res.json.sections), true);
|
||||
assert.ok(res.json.sections.length > 0);
|
||||
});
|
||||
});
|
||||
|
||||
// ===== New file: ccw/tests/integration/hooks-routes.test.ts (159 lines) =====
|
||||
/**
|
||||
* Integration tests for hooks routes (hooks configuration CRUD).
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Uses temporary HOME/USERPROFILE for global settings isolation.
|
||||
* - Calls route handler directly (no HTTP server required).
|
||||
*/
|
||||
|
||||
import { after, before, beforeEach, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const hooksRoutesUrl = new URL('../../dist/core/routes/hooks-routes.js', import.meta.url);
|
||||
hooksRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
const originalEnv = {
|
||||
HOME: process.env.HOME,
|
||||
USERPROFILE: process.env.USERPROFILE,
|
||||
HOMEDRIVE: process.env.HOMEDRIVE,
|
||||
HOMEPATH: process.env.HOMEPATH,
|
||||
};
|
||||
|
||||
async function callHooks(
|
||||
initialPath: string,
|
||||
method: string,
|
||||
pathname: string,
|
||||
body?: any,
|
||||
): Promise<{ handled: boolean; status: number; json: any }> {
|
||||
const url = new URL(pathname, 'http://localhost');
|
||||
let status = 0;
|
||||
let text = '';
|
||||
|
||||
const res = {
|
||||
writeHead(code: number) {
|
||||
status = code;
|
||||
},
|
||||
end(chunk?: any) {
|
||||
text = chunk === undefined ? '' : String(chunk);
|
||||
},
|
||||
};
|
||||
|
||||
const handlePostRequest = async (_req: any, _res: any, handler: (parsed: any) => Promise<any>) => {
|
||||
const result = await handler(body ?? {});
|
||||
if (result && typeof result === 'object' && typeof result.error === 'string' && result.error.length > 0) {
|
||||
res.writeHead(typeof result.status === 'number' ? result.status : 500);
|
||||
res.end(JSON.stringify({ error: result.error }));
|
||||
return;
|
||||
}
|
||||
res.writeHead(200);
|
||||
res.end(JSON.stringify(result));
|
||||
};
|
||||
|
||||
const handled = await mod.handleHooksRoutes({
|
||||
pathname: url.pathname,
|
||||
url,
|
||||
req: { method },
|
||||
res,
|
||||
initialPath,
|
||||
handlePostRequest,
|
||||
broadcastToClients() {},
|
||||
extractSessionIdFromPath() {
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
return { handled, status, json: text ? JSON.parse(text) : null };
|
||||
}
|
||||
|
||||
describe('hooks routes integration', async () => {
|
||||
let homeDir = '';
|
||||
let projectRoot = '';
|
||||
|
||||
before(async () => {
|
||||
homeDir = mkdtempSync(join(tmpdir(), 'ccw-hooks-home-'));
|
||||
projectRoot = mkdtempSync(join(tmpdir(), 'ccw-hooks-project-'));
|
||||
|
||||
process.env.HOME = homeDir;
|
||||
process.env.USERPROFILE = homeDir;
|
||||
process.env.HOMEDRIVE = undefined;
|
||||
process.env.HOMEPATH = undefined;
|
||||
|
||||
mock.method(console, 'log', () => {});
|
||||
mock.method(console, 'warn', () => {});
|
||||
mock.method(console, 'error', () => {});
|
||||
|
||||
mod = await import(hooksRoutesUrl.href);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
rmSync(join(homeDir, '.claude'), { recursive: true, force: true });
|
||||
rmSync(join(projectRoot, '.claude'), { recursive: true, force: true });
|
||||
});
|
||||
|
||||
after(() => {
|
||||
mock.restoreAll();
|
||||
process.env.HOME = originalEnv.HOME;
|
||||
process.env.USERPROFILE = originalEnv.USERPROFILE;
|
||||
process.env.HOMEDRIVE = originalEnv.HOMEDRIVE;
|
||||
process.env.HOMEPATH = originalEnv.HOMEPATH;
|
||||
|
||||
rmSync(projectRoot, { recursive: true, force: true });
|
||||
rmSync(homeDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('GET /api/hooks returns global and project hook configs', async () => {
|
||||
const res = await callHooks(projectRoot, 'GET', '/api/hooks');
|
||||
assert.equal(res.handled, true);
|
||||
assert.equal(res.status, 200);
|
||||
assert.ok(res.json);
|
||||
assert.ok(res.json.global);
|
||||
assert.ok(res.json.project);
|
||||
assert.deepEqual(res.json.global.hooks, {});
|
||||
assert.deepEqual(res.json.project.hooks, {});
|
||||
});
|
||||
|
||||
it('POST /api/hooks saves a global hook and GET reflects it', async () => {
|
||||
const save = await callHooks(projectRoot, 'POST', '/api/hooks', {
|
||||
scope: 'global',
|
||||
event: 'PreToolUse',
|
||||
hookData: { command: 'echo hi' },
|
||||
});
|
||||
assert.equal(save.handled, true);
|
||||
assert.equal(save.status, 200);
|
||||
assert.equal(save.json.success, true);
|
||||
|
||||
const read = await callHooks(projectRoot, 'GET', '/api/hooks');
|
||||
assert.equal(read.status, 200);
|
||||
assert.equal(Array.isArray(read.json.global.hooks.PreToolUse), true);
|
||||
assert.equal(read.json.global.hooks.PreToolUse.length, 1);
|
||||
assert.equal(read.json.global.hooks.PreToolUse[0].command, 'echo hi');
|
||||
});
|
||||
|
||||
it('DELETE /api/hooks removes a hook by index', async () => {
|
||||
await callHooks(projectRoot, 'POST', '/api/hooks', {
|
||||
scope: 'global',
|
||||
event: 'PreToolUse',
|
||||
hookData: { command: 'echo hi' },
|
||||
});
|
||||
|
||||
const del = await callHooks(projectRoot, 'DELETE', '/api/hooks', {
|
||||
scope: 'global',
|
||||
event: 'PreToolUse',
|
||||
hookIndex: 0,
|
||||
});
|
||||
assert.equal(del.status, 200);
|
||||
assert.equal(del.json.success, true);
|
||||
|
||||
const read = await callHooks(projectRoot, 'GET', '/api/hooks');
|
||||
assert.equal(read.status, 200);
|
||||
assert.deepEqual(read.json.global.hooks, {});
|
||||
});
|
||||
});
|
||||
|
||||
// ===== New file: ccw/tests/integration/issue-routes.test.ts (296 lines) =====
|
||||
/**
|
||||
* Integration tests for issue routes (issues + solutions + queue).
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Uses a temporary project root to isolate `.workflow/issues` JSONL storage.
|
||||
*/
|
||||
|
||||
import { after, before, beforeEach, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
import { existsSync, mkdtempSync, readFileSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const issueRoutesUrl = new URL('../../dist/core/routes/issue-routes.js', import.meta.url);
|
||||
issueRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
type JsonResponse = { status: number; json: any; text: string };
|
||||
|
||||
async function requestJson(
|
||||
baseUrl: string,
|
||||
method: string,
|
||||
path: string,
|
||||
body?: unknown,
|
||||
): Promise<JsonResponse> {
|
||||
const url = new URL(path, baseUrl);
|
||||
const payload = body === undefined ? null : Buffer.from(JSON.stringify(body), 'utf8');
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = http.request(
|
||||
url,
|
||||
{
|
||||
method,
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
...(payload
|
||||
? { 'Content-Type': 'application/json', 'Content-Length': String(payload.length) }
|
||||
: {}),
|
||||
},
|
||||
},
|
||||
(res) => {
|
||||
let responseBody = '';
|
||||
res.on('data', (chunk) => {
|
||||
responseBody += chunk.toString();
|
||||
});
|
||||
res.on('end', () => {
|
||||
let json: any = null;
|
||||
try {
|
||||
json = responseBody ? JSON.parse(responseBody) : null;
|
||||
} catch {
|
||||
json = null;
|
||||
}
|
||||
resolve({ status: res.statusCode || 0, json, text: responseBody });
|
||||
});
|
||||
},
|
||||
);
|
||||
req.on('error', reject);
|
||||
if (payload) req.write(payload);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: any) => Promise<any>): void {
|
||||
let body = '';
|
||||
req.on('data', (chunk) => {
|
||||
body += chunk.toString();
|
||||
});
|
||||
req.on('end', async () => {
|
||||
try {
|
||||
const parsed = body ? JSON.parse(body) : {};
|
||||
const result = await handler(parsed);
|
||||
|
||||
if (result?.error) {
|
||||
res.writeHead(result.status || 500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: result.error }));
|
||||
} else {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function readJsonl(path: string): any[] {
|
||||
if (!existsSync(path)) return [];
|
||||
return readFileSync(path, 'utf8')
|
||||
.split('\n')
|
||||
.filter((line) => line.trim().length > 0)
|
||||
.map((line) => JSON.parse(line));
|
||||
}
|
||||
|
||||
describe('issue routes integration', async () => {
|
||||
let server: http.Server | null = null;
|
||||
let baseUrl = '';
|
||||
let projectRoot = '';
|
||||
|
||||
before(async () => {
|
||||
projectRoot = mkdtempSync(join(tmpdir(), 'ccw-issue-routes-project-'));
|
||||
|
||||
mock.method(console, 'log', () => {});
|
||||
mock.method(console, 'error', () => {});
|
||||
|
||||
mod = await import(issueRoutesUrl.href);
|
||||
|
||||
server = http.createServer(async (req, res) => {
|
||||
const url = new URL(req.url || '/', 'http://localhost');
|
||||
const pathname = url.pathname;
|
||||
|
||||
const ctx = {
|
||||
pathname,
|
||||
url,
|
||||
req,
|
||||
res,
|
||||
initialPath: projectRoot,
|
||||
handlePostRequest,
|
||||
};
|
||||
|
||||
try {
|
||||
const handled = await mod.handleIssueRoutes(ctx);
|
||||
if (!handled) {
|
||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Not Found' }));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
|
||||
await new Promise<void>((resolve) => server!.listen(0, () => resolve()));
|
||||
const addr = server.address();
|
||||
const port = typeof addr === 'object' && addr ? addr.port : 0;
|
||||
baseUrl = `http://127.0.0.1:${port}`;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
rmSync(join(projectRoot, '.workflow'), { recursive: true, force: true });
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
mock.restoreAll();
|
||||
if (server) {
|
||||
await new Promise<void>((resolve) => server!.close(() => resolve()));
|
||||
server = null;
|
||||
}
|
||||
if (projectRoot) {
|
||||
rmSync(projectRoot, { recursive: true, force: true });
|
||||
projectRoot = '';
|
||||
}
|
||||
});
|
||||
|
||||
it('GET /api/issues returns empty issues list with metadata', async () => {
|
||||
const res = await requestJson(baseUrl, 'GET', '/api/issues');
|
||||
assert.equal(res.status, 200);
|
||||
assert.ok(res.json);
|
||||
assert.equal(Array.isArray(res.json.issues), true);
|
||||
assert.equal(res.json.issues.length, 0);
|
||||
assert.equal(res.json._metadata.storage, 'jsonl');
|
||||
});
|
||||
|
||||
it('POST /api/issues creates a new issue and writes JSONL', async () => {
|
||||
const issueId = 'ISS-IR-1';
|
||||
const res = await requestJson(baseUrl, 'POST', '/api/issues', { id: issueId, title: 'Issue routes test' });
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json?.success, true);
|
||||
assert.equal(res.json.issue.id, issueId);
|
||||
|
||||
const issuesPath = join(projectRoot, '.workflow', 'issues', 'issues.jsonl');
|
||||
const lines = readJsonl(issuesPath);
|
||||
assert.equal(lines.length, 1);
|
||||
assert.equal(lines[0].id, issueId);
|
||||
assert.equal(typeof lines[0].created_at, 'string');
|
||||
});
|
||||
|
||||
it('GET /api/issues returns enriched issue list with counts', async () => {
|
||||
const issueId = 'ISS-IR-2';
|
||||
await requestJson(baseUrl, 'POST', '/api/issues', { id: issueId, title: 'Counts' });
|
||||
|
||||
const res = await requestJson(baseUrl, 'GET', '/api/issues');
|
||||
assert.equal(res.status, 200);
|
||||
const issue = res.json.issues.find((i: any) => i.id === issueId);
|
||||
assert.ok(issue);
|
||||
assert.equal(issue.solution_count, 0);
|
||||
assert.equal(issue.task_count, 0);
|
||||
});
|
||||
|
||||
it('GET /api/issues/:id returns issue detail with solutions/tasks arrays', async () => {
|
||||
const issueId = 'ISS-IR-3';
|
||||
await requestJson(baseUrl, 'POST', '/api/issues', { id: issueId, title: 'Detail' });
|
||||
|
||||
const res = await requestJson(baseUrl, 'GET', `/api/issues/${encodeURIComponent(issueId)}`);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json.id, issueId);
|
||||
assert.equal(Array.isArray(res.json.solutions), true);
|
||||
assert.equal(Array.isArray(res.json.tasks), true);
|
||||
assert.equal(res.json.solutions.length, 0);
|
||||
assert.equal(res.json.tasks.length, 0);
|
||||
});
|
||||
|
||||
it('POST /api/issues/:id/solutions appends a solution to solutions JSONL', async () => {
|
||||
const issueId = 'ISS-IR-4';
|
||||
const solutionId = 'SOL-ISS-IR-4-1';
|
||||
await requestJson(baseUrl, 'POST', '/api/issues', { id: issueId, title: 'Solution add' });
|
||||
|
||||
const tasks = [{ id: 'T1', title: 'Do thing' }];
|
||||
const res = await requestJson(baseUrl, 'POST', `/api/issues/${encodeURIComponent(issueId)}/solutions`, { id: solutionId, tasks });
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json?.success, true);
|
||||
assert.equal(res.json.solution.id, solutionId);
|
||||
assert.equal(res.json.solution.is_bound, false);
|
||||
|
||||
const solutionsPath = join(projectRoot, '.workflow', 'issues', 'solutions', `${issueId}.jsonl`);
|
||||
const lines = readJsonl(solutionsPath);
|
||||
assert.equal(lines.length, 1);
|
||||
assert.equal(lines[0].id, solutionId);
|
||||
assert.equal(Array.isArray(lines[0].tasks), true);
|
||||
});
|
||||
|
||||
it('PATCH /api/issues/:id binds solution and updates planned status', async () => {
|
||||
const issueId = 'ISS-IR-5';
|
||||
const solutionId = 'SOL-ISS-IR-5-1';
|
||||
await requestJson(baseUrl, 'POST', '/api/issues', { id: issueId, title: 'Bind' });
|
||||
await requestJson(baseUrl, 'POST', `/api/issues/${encodeURIComponent(issueId)}/solutions`, { id: solutionId, tasks: [{ id: 'T1' }] });
|
||||
|
||||
const res = await requestJson(baseUrl, 'PATCH', `/api/issues/${encodeURIComponent(issueId)}`, { bound_solution_id: solutionId });
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json?.success, true);
|
||||
assert.ok(res.json.updated.includes('bound_solution_id'));
|
||||
|
||||
const detail = await requestJson(baseUrl, 'GET', `/api/issues/${encodeURIComponent(issueId)}`);
|
||||
assert.equal(detail.status, 200);
|
||||
assert.equal(detail.json.bound_solution_id, solutionId);
|
||||
assert.equal(detail.json.status, 'planned');
|
||||
assert.ok(detail.json.planned_at);
|
||||
assert.equal(detail.json.tasks.length, 1);
|
||||
|
||||
const solutionsPath = join(projectRoot, '.workflow', 'issues', 'solutions', `${issueId}.jsonl`);
|
||||
const lines = readJsonl(solutionsPath);
|
||||
assert.equal(lines.length, 1);
|
||||
assert.equal(lines[0].is_bound, true);
|
||||
});
|
||||
|
||||
it('PATCH /api/issues/:id/tasks/:taskId updates bound solution task fields', async () => {
|
||||
const issueId = 'ISS-IR-6';
|
||||
const solutionId = 'SOL-ISS-IR-6-1';
|
||||
await requestJson(baseUrl, 'POST', '/api/issues', { id: issueId, title: 'Task update' });
|
||||
await requestJson(baseUrl, 'POST', `/api/issues/${encodeURIComponent(issueId)}/solutions`, { id: solutionId, tasks: [{ id: 'T1', status: 'pending' }] });
|
||||
await requestJson(baseUrl, 'PATCH', `/api/issues/${encodeURIComponent(issueId)}`, { bound_solution_id: solutionId });
|
||||
|
||||
const res = await requestJson(baseUrl, 'PATCH', `/api/issues/${encodeURIComponent(issueId)}/tasks/T1`, { status: 'completed', result: { ok: true } });
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json?.success, true);
|
||||
assert.ok(res.json.updated.includes('status'));
|
||||
assert.ok(res.json.updated.includes('result'));
|
||||
|
||||
const solutionsPath = join(projectRoot, '.workflow', 'issues', 'solutions', `${issueId}.jsonl`);
|
||||
const lines = readJsonl(solutionsPath);
|
||||
const task = lines[0].tasks.find((t: any) => t.id === 'T1');
|
||||
assert.equal(task.status, 'completed');
|
||||
assert.deepEqual(task.result, { ok: true });
|
||||
assert.ok(task.updated_at);
|
||||
});
|
||||
|
||||
it('DELETE /api/issues/:id removes issue and deletes solutions JSONL', async () => {
|
||||
const issueId = 'ISS-IR-7';
|
||||
const solutionId = 'SOL-ISS-IR-7-1';
|
||||
await requestJson(baseUrl, 'POST', '/api/issues', { id: issueId, title: 'Delete me' });
|
||||
await requestJson(baseUrl, 'POST', `/api/issues/${encodeURIComponent(issueId)}/solutions`, { id: solutionId, tasks: [{ id: 'T1' }] });
|
||||
|
||||
const res = await requestJson(baseUrl, 'DELETE', `/api/issues/${encodeURIComponent(issueId)}`);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json?.success, true);
|
||||
|
||||
const issuesPath = join(projectRoot, '.workflow', 'issues', 'issues.jsonl');
|
||||
assert.equal(readJsonl(issuesPath).length, 0);
|
||||
|
||||
const solutionsPath = join(projectRoot, '.workflow', 'issues', 'solutions', `${issueId}.jsonl`);
|
||||
assert.equal(existsSync(solutionsPath), false);
|
||||
});
|
||||
|
||||
it('GET /api/queue returns grouped queue structure', async () => {
|
||||
const res = await requestJson(baseUrl, 'GET', '/api/queue');
|
||||
assert.equal(res.status, 200);
|
||||
assert.ok(res.json);
|
||||
assert.equal(Array.isArray(res.json.execution_groups), true);
|
||||
assert.equal(typeof res.json.grouped_items, 'object');
|
||||
});
|
||||
});
|
||||
|
||||
// ===== New file: ccw/tests/integration/litellm-api-routes.test.ts (118 lines) =====
|
||||
/**
|
||||
* Integration tests for LiteLLM API routes (providers + model discovery).
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Calls route handler directly (no HTTP server required).
|
||||
* - Uses temporary CCW_DATA_DIR to isolate ~/.ccw config writes.
|
||||
*/
|
||||
|
||||
import { after, before, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const CCW_HOME = mkdtempSync(join(tmpdir(), 'ccw-litellm-api-home-'));
|
||||
const PROJECT_ROOT = mkdtempSync(join(tmpdir(), 'ccw-litellm-api-project-'));
|
||||
|
||||
const litellmApiRoutesUrl = new URL('../../dist/core/routes/litellm-api-routes.js', import.meta.url);
|
||||
litellmApiRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
const originalEnv = { CCW_DATA_DIR: process.env.CCW_DATA_DIR };
|
||||
|
||||
async function callLiteLLMApi(
|
||||
initialPath: string,
|
||||
method: string,
|
||||
path: string,
|
||||
body?: any,
|
||||
): Promise<{ handled: boolean; status: number; json: any; broadcasts: any[] }> {
|
||||
const url = new URL(path, 'http://localhost');
|
||||
let status = 0;
|
||||
let text = '';
|
||||
const broadcasts: any[] = [];
|
||||
|
||||
const res = {
|
||||
writeHead(code: number) {
|
||||
status = code;
|
||||
},
|
||||
end(chunk?: any) {
|
||||
text = chunk === undefined ? '' : String(chunk);
|
||||
},
|
||||
};
|
||||
|
||||
const handlePostRequest = async (_req: any, _res: any, handler: (parsed: any) => Promise<any>) => {
|
||||
const result = await handler(body ?? {});
|
||||
const errorValue = result && typeof result === 'object' ? (result as any).error : undefined;
|
||||
const statusValue = result && typeof result === 'object' ? (result as any).status : undefined;
|
||||
|
||||
if (typeof errorValue === 'string' && errorValue.length > 0) {
|
||||
res.writeHead(typeof statusValue === 'number' ? statusValue : 500);
|
||||
res.end(JSON.stringify({ error: errorValue }));
|
||||
return;
|
||||
}
|
||||
|
||||
res.writeHead(200);
|
||||
res.end(JSON.stringify(result));
|
||||
};
|
||||
|
||||
const handled = await mod.handleLiteLLMApiRoutes({
|
||||
pathname: url.pathname,
|
||||
url,
|
||||
req: { method },
|
||||
res,
|
||||
initialPath,
|
||||
handlePostRequest,
|
||||
broadcastToClients(data: unknown) {
|
||||
broadcasts.push(data);
|
||||
},
|
||||
});
|
||||
|
||||
return { handled, status, json: text ? JSON.parse(text) : null, broadcasts };
|
||||
}
|
||||
|
||||
describe('litellm-api routes integration', async () => {
|
||||
before(async () => {
|
||||
process.env.CCW_DATA_DIR = CCW_HOME;
|
||||
mock.method(console, 'log', () => {});
|
||||
mock.method(console, 'warn', () => {});
|
||||
mock.method(console, 'error', () => {});
|
||||
mod = await import(litellmApiRoutesUrl.href);
|
||||
});
|
||||
|
||||
after(() => {
|
||||
mock.restoreAll();
|
||||
process.env.CCW_DATA_DIR = originalEnv.CCW_DATA_DIR;
|
||||
rmSync(CCW_HOME, { recursive: true, force: true });
|
||||
rmSync(PROJECT_ROOT, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('GET /api/litellm-api/models/openai returns static model list', async () => {
|
||||
const res = await callLiteLLMApi(PROJECT_ROOT, 'GET', '/api/litellm-api/models/openai');
|
||||
assert.equal(res.handled, true);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json.providerType, 'openai');
|
||||
assert.equal(Array.isArray(res.json.models), true);
|
||||
assert.ok(res.json.models.length > 0);
|
||||
});
|
||||
|
||||
it('GET /api/litellm-api/providers returns default empty config', async () => {
|
||||
const res = await callLiteLLMApi(PROJECT_ROOT, 'GET', '/api/litellm-api/providers');
|
||||
assert.equal(res.handled, true);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(Array.isArray(res.json.providers), true);
|
||||
assert.equal(typeof res.json.count, 'number');
|
||||
});
|
||||
|
||||
it('POST /api/litellm-api/providers validates required fields', async () => {
|
||||
const res = await callLiteLLMApi(PROJECT_ROOT, 'POST', '/api/litellm-api/providers', { name: 'x' });
|
||||
assert.equal(res.handled, true);
|
||||
assert.equal(res.status, 400);
|
||||
assert.ok(String(res.json.error).includes('required'));
|
||||
assert.equal(res.broadcasts.length, 0);
|
||||
});
|
||||
});
|
||||
|
||||
// ===== New file: ccw/tests/integration/nav-status-routes.test.ts (182 lines) =====
|
||||
/**
|
||||
* Integration tests for nav-status routes (badge count aggregation).
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Calls route handler directly (no HTTP server required).
|
||||
* - Uses temporary HOME/USERPROFILE and project root to isolate filesystem reads.
|
||||
*/
|
||||
|
||||
import { after, before, beforeEach, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { existsSync, mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const navStatusRoutesUrl = new URL('../../dist/core/routes/nav-status-routes.js', import.meta.url);
|
||||
navStatusRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
const originalEnv = {
|
||||
HOME: process.env.HOME,
|
||||
USERPROFILE: process.env.USERPROFILE,
|
||||
HOMEDRIVE: process.env.HOMEDRIVE,
|
||||
HOMEPATH: process.env.HOMEPATH,
|
||||
};
|
||||
|
||||
async function getNavStatus(projectRoot: string): Promise<{ status: number; json: any }> {
|
||||
const url = new URL('/api/nav-status', 'http://localhost');
|
||||
let status = 0;
|
||||
let body = '';
|
||||
|
||||
const res = {
|
||||
writeHead(code: number) {
|
||||
status = code;
|
||||
},
|
||||
end(chunk?: any) {
|
||||
body = chunk === undefined ? '' : String(chunk);
|
||||
},
|
||||
};
|
||||
|
||||
const handled = await mod.handleNavStatusRoutes({
|
||||
pathname: '/api/nav-status',
|
||||
url,
|
||||
req: { method: 'GET' },
|
||||
res,
|
||||
initialPath: projectRoot,
|
||||
});
|
||||
|
||||
assert.equal(handled, true);
|
||||
return { status, json: JSON.parse(body) };
|
||||
}
|
||||
|
||||
describe('nav-status routes integration', async () => {
|
||||
let homeDir = '';
|
||||
let projectRoot = '';
|
||||
|
||||
before(async () => {
|
||||
homeDir = mkdtempSync(join(tmpdir(), 'ccw-nav-home-'));
|
||||
projectRoot = mkdtempSync(join(tmpdir(), 'ccw-nav-project-'));
|
||||
|
||||
process.env.HOME = homeDir;
|
||||
process.env.USERPROFILE = homeDir;
|
||||
process.env.HOMEDRIVE = undefined;
|
||||
process.env.HOMEPATH = undefined;
|
||||
|
||||
mock.method(console, 'error', () => {});
|
||||
mod = await import(navStatusRoutesUrl.href);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset relevant trees per test.
|
||||
rmSync(join(projectRoot, '.workflow'), { recursive: true, force: true });
|
||||
rmSync(join(projectRoot, '.claude'), { recursive: true, force: true });
|
||||
rmSync(join(homeDir, '.claude'), { recursive: true, force: true });
|
||||
|
||||
const rootClaude = join(projectRoot, 'CLAUDE.md');
|
||||
if (existsSync(rootClaude)) rmSync(rootClaude, { force: true });
|
||||
});
|
||||
|
||||
after(() => {
|
||||
mock.restoreAll();
|
||||
process.env.HOME = originalEnv.HOME;
|
||||
process.env.USERPROFILE = originalEnv.USERPROFILE;
|
||||
process.env.HOMEDRIVE = originalEnv.HOMEDRIVE;
|
||||
process.env.HOMEPATH = originalEnv.HOMEPATH;
|
||||
|
||||
rmSync(projectRoot, { recursive: true, force: true });
|
||||
rmSync(homeDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('returns zero counts when no data exists', async () => {
|
||||
const res = await getNavStatus(projectRoot);
|
||||
assert.equal(res.status, 200);
|
||||
assert.ok(res.json);
|
||||
|
||||
for (const key of ['issues', 'discoveries', 'skills', 'rules', 'claude', 'hooks', 'timestamp']) {
|
||||
assert.ok(Object.prototype.hasOwnProperty.call(res.json, key), `missing key: ${key}`);
|
||||
}
|
||||
|
||||
assert.equal(res.json.issues.count, 0);
|
||||
assert.equal(res.json.discoveries.count, 0);
|
||||
assert.equal(res.json.skills.count, 0);
|
||||
assert.equal(res.json.rules.count, 0);
|
||||
assert.equal(res.json.claude.count, 0);
|
||||
assert.equal(res.json.hooks.count, 0);
|
||||
assert.equal(typeof res.json.timestamp, 'string');
|
||||
});
|
||||
|
||||
it('counts issues.jsonl lines and discovery index entries', async () => {
|
||||
const issuesDir = join(projectRoot, '.workflow', 'issues');
|
||||
const discoveriesDir = join(issuesDir, 'discoveries');
|
||||
mkdirSync(discoveriesDir, { recursive: true });
|
||||
|
||||
writeFileSync(join(issuesDir, 'issues.jsonl'), '{"id":"ISS-1"}\n{"id":"ISS-2"}\n', 'utf8');
|
||||
writeFileSync(join(discoveriesDir, 'index.json'), JSON.stringify({ discoveries: [{ id: 'DSC-1' }, { id: 'DSC-2' }, { id: 'DSC-3' }] }), 'utf8');
|
||||
|
||||
const res = await getNavStatus(projectRoot);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json.issues.count, 2);
|
||||
assert.equal(res.json.discoveries.count, 3);
|
||||
});
|
||||
|
||||
it('aggregates skills, rules, CLAUDE.md files, and hooks across user/project', async () => {
|
||||
// Skills
|
||||
mkdirSync(join(projectRoot, '.claude', 'skills', 'proj-skill'), { recursive: true });
|
||||
writeFileSync(join(projectRoot, '.claude', 'skills', 'proj-skill', 'SKILL.md'), '# skill\n', 'utf8');
|
||||
mkdirSync(join(homeDir, '.claude', 'skills', 'user-skill-1'), { recursive: true });
|
||||
mkdirSync(join(homeDir, '.claude', 'skills', 'user-skill-2'), { recursive: true });
|
||||
writeFileSync(join(homeDir, '.claude', 'skills', 'user-skill-1', 'SKILL.md'), '# skill\n', 'utf8');
|
||||
writeFileSync(join(homeDir, '.claude', 'skills', 'user-skill-2', 'SKILL.md'), '# skill\n', 'utf8');
|
||||
|
||||
// Rules (recursive)
|
||||
mkdirSync(join(projectRoot, '.claude', 'rules', 'nested'), { recursive: true });
|
||||
writeFileSync(join(projectRoot, '.claude', 'rules', 'a.md'), '# a\n', 'utf8');
|
||||
writeFileSync(join(projectRoot, '.claude', 'rules', 'nested', 'b.md'), '# b\n', 'utf8');
|
||||
mkdirSync(join(homeDir, '.claude', 'rules'), { recursive: true });
|
||||
writeFileSync(join(homeDir, '.claude', 'rules', 'c.md'), '# c\n', 'utf8');
|
||||
|
||||
// CLAUDE.md files (user main + project main + root + module)
|
||||
mkdirSync(join(homeDir, '.claude'), { recursive: true });
|
||||
writeFileSync(join(homeDir, '.claude', 'CLAUDE.md'), '# user\n', 'utf8');
|
||||
mkdirSync(join(projectRoot, '.claude'), { recursive: true });
|
||||
writeFileSync(join(projectRoot, '.claude', 'CLAUDE.md'), '# project\n', 'utf8');
|
||||
writeFileSync(join(projectRoot, 'CLAUDE.md'), '# root\n', 'utf8');
|
||||
const moduleDir = join(projectRoot, 'module-a');
|
||||
mkdirSync(moduleDir, { recursive: true });
|
||||
writeFileSync(join(moduleDir, 'CLAUDE.md'), '# module\n', 'utf8');
|
||||
|
||||
// Hooks in settings.json
|
||||
mkdirSync(join(homeDir, '.claude'), { recursive: true });
|
||||
writeFileSync(
|
||||
join(homeDir, '.claude', 'settings.json'),
|
||||
JSON.stringify({ hooks: { PreToolUse: [{}, {}], PostToolUse: {} } }),
|
||||
'utf8',
|
||||
);
|
||||
writeFileSync(
|
||||
join(projectRoot, '.claude', 'settings.json'),
|
||||
JSON.stringify({ hooks: { PreToolUse: [{}] } }),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
const res = await getNavStatus(projectRoot);
|
||||
assert.equal(res.status, 200);
|
||||
|
||||
assert.equal(res.json.skills.project, 1);
|
||||
assert.equal(res.json.skills.user, 2);
|
||||
assert.equal(res.json.skills.count, 3);
|
||||
|
||||
assert.equal(res.json.rules.project, 2);
|
||||
assert.equal(res.json.rules.user, 1);
|
||||
assert.equal(res.json.rules.count, 3);
|
||||
|
||||
assert.equal(res.json.claude.count, 4);
|
||||
|
||||
assert.equal(res.json.hooks.global, 3);
|
||||
assert.equal(res.json.hooks.project, 1);
|
||||
assert.equal(res.json.hooks.count, 4);
|
||||
});
|
||||
});
|
||||
|
||||
153
ccw/tests/integration/rules-routes.test.ts
Normal file
153
ccw/tests/integration/rules-routes.test.ts
Normal file
@@ -0,0 +1,153 @@
|
||||
/**
|
||||
* Integration tests for rules routes (rules management CRUD).
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Calls route handler directly (no HTTP server required).
|
||||
* - Uses temporary HOME/USERPROFILE to isolate user rules directory.
|
||||
*/
|
||||
|
||||
import { after, before, beforeEach, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { existsSync, mkdtempSync, readFileSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const rulesRoutesUrl = new URL('../../dist/core/routes/rules-routes.js', import.meta.url);
|
||||
rulesRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
const originalEnv = {
|
||||
HOME: process.env.HOME,
|
||||
USERPROFILE: process.env.USERPROFILE,
|
||||
HOMEDRIVE: process.env.HOMEDRIVE,
|
||||
HOMEPATH: process.env.HOMEPATH,
|
||||
};
|
||||
|
||||
async function callRules(
|
||||
initialPath: string,
|
||||
method: string,
|
||||
path: string,
|
||||
body?: any,
|
||||
): Promise<{ handled: boolean; status: number; json: any }> {
|
||||
const url = new URL(path, 'http://localhost');
|
||||
let status = 0;
|
||||
let text = '';
|
||||
let postPromise: Promise<void> | null = null;
|
||||
|
||||
const res = {
|
||||
writeHead(code: number) {
|
||||
status = code;
|
||||
},
|
||||
end(chunk?: any) {
|
||||
text = chunk === undefined ? '' : String(chunk);
|
||||
},
|
||||
};
|
||||
|
||||
const handlePostRequest = (_req: any, _res: any, handler: (parsed: any) => Promise<any>) => {
|
||||
postPromise = (async () => {
|
||||
const result = await handler(body ?? {});
|
||||
const errorValue = result && typeof result === 'object' ? (result as any).error : undefined;
|
||||
const statusValue = result && typeof result === 'object' ? (result as any).status : undefined;
|
||||
|
||||
if (typeof errorValue === 'string' && errorValue.length > 0) {
|
||||
res.writeHead(typeof statusValue === 'number' ? statusValue : 500);
|
||||
res.end(JSON.stringify({ error: errorValue }));
|
||||
return;
|
||||
}
|
||||
|
||||
res.writeHead(200);
|
||||
res.end(JSON.stringify(result));
|
||||
})();
|
||||
};
|
||||
|
||||
const handled = await mod.handleRulesRoutes({
|
||||
pathname: url.pathname,
|
||||
url,
|
||||
req: { method },
|
||||
res,
|
||||
initialPath,
|
||||
handlePostRequest,
|
||||
});
|
||||
|
||||
if (postPromise) await postPromise;
|
||||
|
||||
return { handled, status, json: text ? JSON.parse(text) : null };
|
||||
}
|
||||
|
||||
describe('rules routes integration', async () => {
|
||||
let homeDir = '';
|
||||
let projectRoot = '';
|
||||
|
||||
before(async () => {
|
||||
homeDir = mkdtempSync(join(tmpdir(), 'ccw-rules-home-'));
|
||||
projectRoot = mkdtempSync(join(tmpdir(), 'ccw-rules-project-'));
|
||||
|
||||
process.env.HOME = homeDir;
|
||||
process.env.USERPROFILE = homeDir;
|
||||
process.env.HOMEDRIVE = undefined;
|
||||
process.env.HOMEPATH = undefined;
|
||||
|
||||
mock.method(console, 'log', () => {});
|
||||
mock.method(console, 'warn', () => {});
|
||||
mock.method(console, 'error', () => {});
|
||||
|
||||
mod = await import(rulesRoutesUrl.href);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
rmSync(join(homeDir, '.claude'), { recursive: true, force: true });
|
||||
rmSync(join(projectRoot, '.claude'), { recursive: true, force: true });
|
||||
});
|
||||
|
||||
after(() => {
|
||||
mock.restoreAll();
|
||||
process.env.HOME = originalEnv.HOME;
|
||||
process.env.USERPROFILE = originalEnv.USERPROFILE;
|
||||
process.env.HOMEDRIVE = originalEnv.HOMEDRIVE;
|
||||
process.env.HOMEPATH = originalEnv.HOMEPATH;
|
||||
|
||||
rmSync(projectRoot, { recursive: true, force: true });
|
||||
rmSync(homeDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('GET /api/rules returns projectRules and userRules arrays', async () => {
|
||||
const res = await callRules(projectRoot, 'GET', '/api/rules');
|
||||
assert.equal(res.handled, true);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(Array.isArray(res.json.projectRules), true);
|
||||
assert.equal(Array.isArray(res.json.userRules), true);
|
||||
});
|
||||
|
||||
it('POST /api/rules/create writes a project rule and GET reflects it', async () => {
|
||||
const create = await callRules(projectRoot, 'POST', '/api/rules/create', {
|
||||
fileName: 'test-rule.md',
|
||||
content: '# Hello rule\n',
|
||||
paths: ['src/**'],
|
||||
location: 'project',
|
||||
});
|
||||
|
||||
assert.equal(create.handled, true);
|
||||
assert.equal(create.status, 200);
|
||||
assert.equal(create.json.success, true);
|
||||
assert.ok(typeof create.json.path === 'string' && create.json.path.length > 0);
|
||||
assert.equal(existsSync(create.json.path), true);
|
||||
|
||||
const config = await callRules(projectRoot, 'GET', '/api/rules');
|
||||
assert.equal(config.status, 200);
|
||||
assert.equal(config.json.projectRules.length, 1);
|
||||
assert.equal(config.json.projectRules[0].name, 'test-rule.md');
|
||||
|
||||
const detail = await callRules(projectRoot, 'GET', '/api/rules/test-rule.md?location=project');
|
||||
assert.equal(detail.status, 200);
|
||||
assert.equal(detail.json.rule.name, 'test-rule.md');
|
||||
assert.ok(String(detail.json.rule.content).includes('Hello rule'));
|
||||
|
||||
// Ensure frontmatter was persisted.
|
||||
const raw = readFileSync(create.json.path, 'utf8');
|
||||
assert.ok(raw.startsWith('---'));
|
||||
assert.ok(raw.includes('paths: [src/**]'));
|
||||
});
|
||||
});
|
||||
140
ccw/tests/integration/skills-routes.test.ts
Normal file
140
ccw/tests/integration/skills-routes.test.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
/**
|
||||
* Integration tests for skills routes (skills listing + details).
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Calls route handler directly (no HTTP server required).
|
||||
* - Uses temporary HOME/USERPROFILE to isolate user skills directory.
|
||||
*/
|
||||
|
||||
import { after, before, beforeEach, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { mkdirSync, mkdtempSync, rmSync, writeFileSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const skillsRoutesUrl = new URL('../../dist/core/routes/skills-routes.js', import.meta.url);
|
||||
skillsRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
const originalEnv = {
|
||||
HOME: process.env.HOME,
|
||||
USERPROFILE: process.env.USERPROFILE,
|
||||
HOMEDRIVE: process.env.HOMEDRIVE,
|
||||
HOMEPATH: process.env.HOMEPATH,
|
||||
};
|
||||
|
||||
async function callSkills(
|
||||
initialPath: string,
|
||||
method: string,
|
||||
path: string,
|
||||
): Promise<{ handled: boolean; status: number; json: any }> {
|
||||
const url = new URL(path, 'http://localhost');
|
||||
let status = 0;
|
||||
let body = '';
|
||||
|
||||
const res = {
|
||||
writeHead(code: number) {
|
||||
status = code;
|
||||
},
|
||||
end(chunk?: any) {
|
||||
body = chunk === undefined ? '' : String(chunk);
|
||||
},
|
||||
};
|
||||
|
||||
const handled = await mod.handleSkillsRoutes({
|
||||
pathname: url.pathname,
|
||||
url,
|
||||
req: { method },
|
||||
res,
|
||||
initialPath,
|
||||
handlePostRequest() {
|
||||
throw new Error('handlePostRequest should not be called for these tests');
|
||||
},
|
||||
});
|
||||
|
||||
return { handled, status, json: body ? JSON.parse(body) : null };
|
||||
}
|
||||
|
||||
describe('skills routes integration', async () => {
|
||||
let homeDir = '';
|
||||
let projectRoot = '';
|
||||
|
||||
before(async () => {
|
||||
homeDir = mkdtempSync(join(tmpdir(), 'ccw-skills-home-'));
|
||||
projectRoot = mkdtempSync(join(tmpdir(), 'ccw-skills-project-'));
|
||||
|
||||
process.env.HOME = homeDir;
|
||||
process.env.USERPROFILE = homeDir;
|
||||
process.env.HOMEDRIVE = undefined;
|
||||
process.env.HOMEPATH = undefined;
|
||||
|
||||
mock.method(console, 'error', () => {});
|
||||
mod = await import(skillsRoutesUrl.href);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
rmSync(join(homeDir, '.claude'), { recursive: true, force: true });
|
||||
rmSync(join(projectRoot, '.claude'), { recursive: true, force: true });
|
||||
|
||||
const skillDir = join(projectRoot, '.claude', 'skills', 'test-skill');
|
||||
mkdirSync(skillDir, { recursive: true });
|
||||
writeFileSync(
|
||||
join(skillDir, 'SKILL.md'),
|
||||
`---
|
||||
name: "Test Skill"
|
||||
description: "A test skill"
|
||||
version: "1.0.0"
|
||||
allowed-tools: [ccw issue next]
|
||||
---
|
||||
|
||||
# Test
|
||||
`,
|
||||
'utf8',
|
||||
);
|
||||
writeFileSync(join(skillDir, 'extra.txt'), 'extra', 'utf8');
|
||||
});
|
||||
|
||||
after(() => {
|
||||
mock.restoreAll();
|
||||
process.env.HOME = originalEnv.HOME;
|
||||
process.env.USERPROFILE = originalEnv.USERPROFILE;
|
||||
process.env.HOMEDRIVE = originalEnv.HOMEDRIVE;
|
||||
process.env.HOMEPATH = originalEnv.HOMEPATH;
|
||||
|
||||
rmSync(projectRoot, { recursive: true, force: true });
|
||||
rmSync(homeDir, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('GET /api/skills lists projectSkills and userSkills', async () => {
|
||||
const res = await callSkills(projectRoot, 'GET', `/api/skills?path=${encodeURIComponent(projectRoot)}`);
|
||||
assert.equal(res.handled, true);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(Array.isArray(res.json.projectSkills), true);
|
||||
assert.equal(Array.isArray(res.json.userSkills), true);
|
||||
assert.equal(res.json.projectSkills.length, 1);
|
||||
assert.equal(res.json.projectSkills[0].folderName, 'test-skill');
|
||||
assert.equal(res.json.projectSkills[0].name, 'Test Skill');
|
||||
assert.ok(res.json.projectSkills[0].supportingFiles.includes('extra.txt'));
|
||||
});
|
||||
|
||||
it('GET /api/skills/:name returns skill detail with parsed content', async () => {
|
||||
const res = await callSkills(projectRoot, 'GET', `/api/skills/test-skill?location=project&path=${encodeURIComponent(projectRoot)}`);
|
||||
assert.equal(res.handled, true);
|
||||
assert.equal(res.status, 200);
|
||||
assert.equal(res.json.skill.folderName, 'test-skill');
|
||||
assert.equal(res.json.skill.name, 'Test Skill');
|
||||
assert.equal(Array.isArray(res.json.skill.allowedTools), true);
|
||||
assert.ok(String(res.json.skill.content).includes('# Test'));
|
||||
});
|
||||
|
||||
it('returns 404 when skill is missing', async () => {
|
||||
const res = await callSkills(projectRoot, 'GET', `/api/skills/nope?location=project&path=${encodeURIComponent(projectRoot)}`);
|
||||
assert.equal(res.handled, true);
|
||||
assert.equal(res.status, 404);
|
||||
assert.ok(res.json.error);
|
||||
});
|
||||
});
|
||||
|
||||
1357
ccw/tests/issue-command.test.ts
Normal file
1357
ccw/tests/issue-command.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
119
ccw/tests/middleware.test.ts
Normal file
119
ccw/tests/middleware.test.ts
Normal file
@@ -0,0 +1,119 @@
|
||||
/**
|
||||
* Unit tests for auth middleware (ccw/dist/core/auth/middleware.js)
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
const middlewareUrl = new URL('../dist/core/auth/middleware.js', import.meta.url).href;
|
||||
const tokenManagerUrl = new URL('../dist/core/auth/token-manager.js', import.meta.url).href;
|
||||
|
||||
type MockResponse = {
|
||||
status: number | null;
|
||||
headers: Record<string, string>;
|
||||
body: string;
|
||||
writeHead: (status: number, headers?: Record<string, string>) => void;
|
||||
setHeader: (name: string, value: string) => void;
|
||||
end: (body?: string) => void;
|
||||
};
|
||||
|
||||
function createMockRes(): MockResponse {
|
||||
const headers: Record<string, string> = {};
|
||||
const response: MockResponse = {
|
||||
status: null,
|
||||
headers,
|
||||
body: '',
|
||||
writeHead: (status: number, nextHeaders?: Record<string, string>) => {
|
||||
response.status = status;
|
||||
if (nextHeaders) {
|
||||
for (const [k, v] of Object.entries(nextHeaders)) {
|
||||
headers[k.toLowerCase()] = v;
|
||||
}
|
||||
}
|
||||
},
|
||||
setHeader: (name: string, value: string) => {
|
||||
headers[name.toLowerCase()] = value;
|
||||
},
|
||||
end: (body?: string) => {
|
||||
response.body = body ? String(body) : '';
|
||||
},
|
||||
};
|
||||
return response;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let middleware: any;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let tokenMod: any;
|
||||
|
||||
describe('auth middleware', async () => {
|
||||
middleware = await import(middlewareUrl);
|
||||
tokenMod = await import(tokenManagerUrl);
|
||||
|
||||
it('rejects requests without tokens', () => {
|
||||
const tokenManager = new tokenMod.TokenManager();
|
||||
const secretKey = 'secret';
|
||||
|
||||
const req: any = { headers: {}, socket: { remoteAddress: '127.0.0.1' } };
|
||||
const res = createMockRes();
|
||||
|
||||
const ok = middleware.authMiddleware({
|
||||
pathname: '/api/health',
|
||||
req,
|
||||
res,
|
||||
tokenManager,
|
||||
secretKey,
|
||||
unauthenticatedPaths: new Set(['/api/auth/token']),
|
||||
});
|
||||
|
||||
assert.equal(ok, false);
|
||||
assert.equal(res.status, 401);
|
||||
assert.ok(res.body.includes('Unauthorized'));
|
||||
});
|
||||
|
||||
it('accepts Authorization: Bearer tokens', () => {
|
||||
const tokenManager = new tokenMod.TokenManager();
|
||||
const secretKey = 'secret';
|
||||
const { token } = tokenManager.generateToken(secretKey);
|
||||
|
||||
const req: any = { headers: { authorization: `Bearer ${token}` }, socket: { remoteAddress: '127.0.0.1' } };
|
||||
const res = createMockRes();
|
||||
|
||||
const ok = middleware.authMiddleware({
|
||||
pathname: '/api/health',
|
||||
req,
|
||||
res,
|
||||
tokenManager,
|
||||
secretKey,
|
||||
});
|
||||
|
||||
assert.equal(ok, true);
|
||||
assert.equal(req.authenticated, true);
|
||||
});
|
||||
|
||||
it('accepts auth_token cookies', () => {
|
||||
const tokenManager = new tokenMod.TokenManager();
|
||||
const secretKey = 'secret';
|
||||
const { token } = tokenManager.generateToken(secretKey);
|
||||
|
||||
const req: any = { headers: { cookie: `auth_token=${encodeURIComponent(token)}` }, socket: { remoteAddress: '127.0.0.1' } };
|
||||
const res = createMockRes();
|
||||
|
||||
const ok = middleware.authMiddleware({
|
||||
pathname: '/api/health',
|
||||
req,
|
||||
res,
|
||||
tokenManager,
|
||||
secretKey,
|
||||
});
|
||||
|
||||
assert.equal(ok, true);
|
||||
});
|
||||
|
||||
it('isLocalhostRequest detects loopback addresses', () => {
|
||||
assert.equal(middleware.isLocalhostRequest({ socket: { remoteAddress: '127.0.0.1' } } as any), true);
|
||||
assert.equal(middleware.isLocalhostRequest({ socket: { remoteAddress: '::1' } } as any), true);
|
||||
assert.equal(middleware.isLocalhostRequest({ socket: { remoteAddress: '::ffff:127.0.0.1' } } as any), true);
|
||||
assert.equal(middleware.isLocalhostRequest({ socket: { remoteAddress: '10.0.0.5' } } as any), false);
|
||||
});
|
||||
});
|
||||
@@ -177,6 +177,32 @@ describe('path-resolver utility module', async () => {
|
||||
assert.ok(res.error?.includes('Path must be within'));
|
||||
});
|
||||
|
||||
it('validatePath blocks symlink escapes even when target path does not exist', () => {
|
||||
const baseDir = 'C:\\allowed';
|
||||
const linkPath = 'C:\\allowed\\link';
|
||||
setExists(linkPath, true);
|
||||
setDir(linkPath, true);
|
||||
setRealpath(linkPath, 'C:\\secret');
|
||||
|
||||
const res = pathResolver.validatePath(path.join(linkPath, 'newfile.txt'), { baseDir });
|
||||
assert.equal(res.valid, false);
|
||||
assert.equal(res.path, null);
|
||||
assert.ok(res.error?.includes('Path must be within'));
|
||||
});
|
||||
|
||||
it('validatePath allows symlinked parent directories that resolve within baseDir', () => {
|
||||
const baseDir = 'C:\\allowed';
|
||||
const linkPath = 'C:\\allowed\\link';
|
||||
setExists(linkPath, true);
|
||||
setDir(linkPath, true);
|
||||
setRealpath(linkPath, 'C:\\allowed\\real');
|
||||
|
||||
const res = pathResolver.validatePath(path.join(linkPath, 'newfile.txt'), { baseDir });
|
||||
assert.equal(res.valid, true);
|
||||
assert.equal(res.path, path.join('C:\\allowed\\real', 'newfile.txt'));
|
||||
assert.equal(res.error, null);
|
||||
});
|
||||
|
||||
it('validateOutputPath rejects directories and resolves relative output paths', () => {
|
||||
assert.equal(pathResolver.validateOutputPath('').valid, false);
|
||||
|
||||
|
||||
243
ccw/tests/security/command-injection.test.ts
Normal file
243
ccw/tests/security/command-injection.test.ts
Normal file
@@ -0,0 +1,243 @@
|
||||
/**
|
||||
* Regression tests for command injection protections in cli-executor.
|
||||
*
|
||||
* Focus: ensure args are escaped on Windows when `shell: true` is required.
|
||||
*/
|
||||
|
||||
import { after, before, describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { createRequire } from 'node:module';
|
||||
import { EventEmitter } from 'node:events';
|
||||
import { PassThrough } from 'node:stream';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const cliExecutorUrl = new URL('../../dist/tools/cli-executor.js', import.meta.url).href;
|
||||
const historyStoreUrl = new URL('../../dist/tools/cli-history-store.js', import.meta.url).href;
|
||||
const shellEscapeUrl = new URL('../../dist/utils/shell-escape.js', import.meta.url).href;
|
||||
|
||||
describe('cli-executor: command injection regression', async () => {
|
||||
const isWindows = process.platform === 'win32';
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
const childProcess = require('child_process');
|
||||
const originalSpawn = childProcess.spawn;
|
||||
|
||||
const originalSetTimeout = globalThis.setTimeout;
|
||||
|
||||
const spawnCalls: Array<{ command: string; args: string[]; options: Record<string, unknown> }> = [];
|
||||
|
||||
const envSnapshot: Record<string, string | undefined> = {};
|
||||
let ccwHome = '';
|
||||
let projectDir = '';
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let cliExecutorModule: any;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let historyStoreModule: any;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let shellEscapeModule: any;
|
||||
|
||||
function unrefSetTimeout<TArgs extends unknown[]>(
|
||||
fn: (...args: TArgs) => void,
|
||||
delay?: number,
|
||||
...args: TArgs
|
||||
): ReturnType<typeof setTimeout> {
|
||||
const t = originalSetTimeout(fn as (...args: unknown[]) => void, delay as number, ...args);
|
||||
(t as unknown as { unref?: () => void }).unref?.();
|
||||
return t;
|
||||
}
|
||||
|
||||
before(async () => {
|
||||
envSnapshot.CCW_DATA_DIR = process.env.CCW_DATA_DIR;
|
||||
envSnapshot.DEBUG = process.env.DEBUG;
|
||||
envSnapshot.CCW_DEBUG = process.env.CCW_DEBUG;
|
||||
|
||||
ccwHome = mkdtempSync(join(tmpdir(), 'ccw-command-injection-home-'));
|
||||
projectDir = mkdtempSync(join(tmpdir(), 'ccw-command-injection-project-'));
|
||||
process.env.CCW_DATA_DIR = ccwHome;
|
||||
delete process.env.DEBUG;
|
||||
delete process.env.CCW_DEBUG;
|
||||
|
||||
// Prevent long-lived timeouts in the module under test from delaying process exit.
|
||||
globalThis.setTimeout = unrefSetTimeout as unknown as typeof setTimeout;
|
||||
|
||||
shellEscapeModule = await import(shellEscapeUrl);
|
||||
|
||||
// Patch child_process.spawn BEFORE importing cli-executor (it captures spawn at module init).
|
||||
childProcess.spawn = (command: unknown, args: unknown[], options: Record<string, unknown>) => {
|
||||
const cmd = String(command);
|
||||
const argv = Array.isArray(args) ? args.map((a) => String(a)) : [];
|
||||
spawnCalls.push({ command: cmd, args: argv, options: options || {} });
|
||||
|
||||
const child = new EventEmitter() as any;
|
||||
child.pid = 4242;
|
||||
child.killed = false;
|
||||
child.stdin = new PassThrough();
|
||||
child.stdout = new PassThrough();
|
||||
child.stderr = new PassThrough();
|
||||
|
||||
let closed = false;
|
||||
child.kill = () => {
|
||||
child.killed = true;
|
||||
if (!closed) {
|
||||
closed = true;
|
||||
child.stdout.end();
|
||||
child.stderr.end();
|
||||
child.emit('close', 0);
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
||||
process.nextTick(() => {
|
||||
if (closed) return;
|
||||
if (cmd === 'where' || cmd === 'which') {
|
||||
const tool = argv[0] || 'tool';
|
||||
child.stdout.write(`C:\\\\fake\\\\${tool}.cmd\r\n`);
|
||||
child.stdout.end();
|
||||
child.stderr.end();
|
||||
closed = true;
|
||||
child.emit('close', 0);
|
||||
return;
|
||||
}
|
||||
|
||||
child.stdout.write('ok\n');
|
||||
child.stdout.end();
|
||||
child.stderr.end();
|
||||
closed = true;
|
||||
child.emit('close', 0);
|
||||
});
|
||||
|
||||
return child;
|
||||
};
|
||||
|
||||
cliExecutorModule = await import(cliExecutorUrl);
|
||||
historyStoreModule = await import(historyStoreUrl);
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
childProcess.spawn = originalSpawn;
|
||||
globalThis.setTimeout = originalSetTimeout;
|
||||
|
||||
try {
|
||||
historyStoreModule?.closeAllStores?.();
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
|
||||
if (projectDir) rmSync(projectDir, { recursive: true, force: true });
|
||||
if (ccwHome) rmSync(ccwHome, { recursive: true, force: true });
|
||||
|
||||
process.env.CCW_DATA_DIR = envSnapshot.CCW_DATA_DIR;
|
||||
if (envSnapshot.DEBUG === undefined) delete process.env.DEBUG;
|
||||
else process.env.DEBUG = envSnapshot.DEBUG;
|
||||
if (envSnapshot.CCW_DEBUG === undefined) delete process.env.CCW_DEBUG;
|
||||
else process.env.CCW_DEBUG = envSnapshot.CCW_DEBUG;
|
||||
});
|
||||
|
||||
it('escapes dangerous metacharacters for Windows shell execution', async () => {
|
||||
const escapeWindowsArg = shellEscapeModule.escapeWindowsArg as (arg: string) => string;
|
||||
|
||||
const cases: Array<{
|
||||
name: string;
|
||||
params: Record<string, unknown>;
|
||||
expectedCommand: string;
|
||||
expectedArgs: string[];
|
||||
}> = [
|
||||
{
|
||||
name: 'gemini: model includes &',
|
||||
params: { tool: 'gemini', prompt: 'hi', cd: projectDir, id: 'case-gemini-model-amp', model: 'gpt-4 & calc' },
|
||||
expectedCommand: 'gemini',
|
||||
expectedArgs: ['-m', 'gpt-4 & calc'],
|
||||
},
|
||||
{
|
||||
name: 'gemini: model includes |',
|
||||
params: { tool: 'gemini', prompt: 'hi', cd: projectDir, id: 'case-gemini-model-pipe', model: 'gpt|calc' },
|
||||
expectedCommand: 'gemini',
|
||||
expectedArgs: ['-m', 'gpt|calc'],
|
||||
},
|
||||
{
|
||||
name: 'gemini: model includes >',
|
||||
params: { tool: 'gemini', prompt: 'hi', cd: projectDir, id: 'case-gemini-model-gt', model: 'gpt>out.txt' },
|
||||
expectedCommand: 'gemini',
|
||||
expectedArgs: ['-m', 'gpt>out.txt'],
|
||||
},
|
||||
{
|
||||
name: 'gemini: model includes <',
|
||||
params: { tool: 'gemini', prompt: 'hi', cd: projectDir, id: 'case-gemini-model-lt', model: 'gpt<input.txt' },
|
||||
expectedCommand: 'gemini',
|
||||
expectedArgs: ['-m', 'gpt<input.txt'],
|
||||
},
|
||||
{
|
||||
name: 'gemini: model includes parentheses',
|
||||
params: { tool: 'gemini', prompt: 'hi', cd: projectDir, id: 'case-gemini-model-paren', model: '(gpt)' },
|
||||
expectedCommand: 'gemini',
|
||||
expectedArgs: ['-m', '(gpt)'],
|
||||
},
|
||||
{
|
||||
name: 'gemini: model includes %',
|
||||
params: { tool: 'gemini', prompt: 'hi', cd: projectDir, id: 'case-gemini-model-percent', model: '%PATH%' },
|
||||
expectedCommand: 'gemini',
|
||||
expectedArgs: ['-m', '%PATH%'],
|
||||
},
|
||||
{
|
||||
name: 'gemini: model includes !',
|
||||
params: { tool: 'gemini', prompt: 'hi', cd: projectDir, id: 'case-gemini-model-bang', model: '!VAR!' },
|
||||
expectedCommand: 'gemini',
|
||||
expectedArgs: ['-m', '!VAR!'],
|
||||
},
|
||||
{
|
||||
name: 'gemini: model includes caret',
|
||||
params: { tool: 'gemini', prompt: 'hi', cd: projectDir, id: 'case-gemini-model-caret', model: 'a^b' },
|
||||
expectedCommand: 'gemini',
|
||||
expectedArgs: ['-m', 'a^b'],
|
||||
},
|
||||
{
|
||||
name: 'gemini: includeDirs includes spaces and &',
|
||||
params: { tool: 'gemini', prompt: 'hi', cd: projectDir, id: 'case-gemini-include', includeDirs: 'C:\\Program Files\\A & B', model: 'test-model' },
|
||||
expectedCommand: 'gemini',
|
||||
expectedArgs: ['-m', 'test-model', '--include-directories', 'C:\\Program Files\\A & B'],
|
||||
},
|
||||
{
|
||||
name: 'qwen: model includes double quote',
|
||||
params: { tool: 'qwen', prompt: 'hi', cd: projectDir, id: 'case-qwen-model-quote', model: 'qwen\"model' },
|
||||
expectedCommand: 'qwen',
|
||||
expectedArgs: ['-m', 'qwen\"model'],
|
||||
},
|
||||
{
|
||||
name: 'qwen: includeDirs includes |',
|
||||
params: { tool: 'qwen', prompt: 'hi', cd: projectDir, id: 'case-qwen-include-pipe', includeDirs: 'C:\\a|b', model: 'test-model' },
|
||||
expectedCommand: 'qwen',
|
||||
expectedArgs: ['-m', 'test-model', '--include-directories', 'C:\\a|b'],
|
||||
},
|
||||
{
|
||||
name: 'codex: --add-dir values include metacharacters and spaces',
|
||||
params: { tool: 'codex', prompt: 'hi', cd: projectDir, id: 'case-codex-include', includeDirs: 'C:\\a&b,C:\\c d', model: 'gpt-4' },
|
||||
expectedCommand: 'codex',
|
||||
expectedArgs: ['exec', '--full-auto', '-m', 'gpt-4', '--add-dir', 'C:\\a&b', '--add-dir', 'C:\\c d', '-'],
|
||||
},
|
||||
];
|
||||
|
||||
for (const testCase of cases) {
|
||||
spawnCalls.length = 0;
|
||||
|
||||
await cliExecutorModule.executeCliTool(testCase.params, null);
|
||||
|
||||
const execCall = spawnCalls.find((c) => c.command === testCase.expectedCommand);
|
||||
assert.ok(execCall, `Expected spawn call for ${testCase.expectedCommand} (${testCase.name})`);
|
||||
|
||||
assert.equal(
|
||||
execCall.options?.shell,
|
||||
isWindows,
|
||||
`Expected shell=${String(isWindows)} for ${testCase.expectedCommand} (${testCase.name})`
|
||||
);
|
||||
|
||||
const expectedCommand = isWindows ? escapeWindowsArg(testCase.expectedCommand) : testCase.expectedCommand;
|
||||
const expectedArgs = isWindows ? testCase.expectedArgs.map(escapeWindowsArg) : testCase.expectedArgs;
|
||||
|
||||
assert.equal(execCall.command, expectedCommand, `spawn command (${testCase.name})`);
|
||||
assert.deepEqual(execCall.args, expectedArgs, `spawn args (${testCase.name})`);
|
||||
}
|
||||
});
|
||||
});
|
||||
447
ccw/tests/security/credential-handling.test.ts
Normal file
447
ccw/tests/security/credential-handling.test.ts
Normal file
@@ -0,0 +1,447 @@
|
||||
/**
|
||||
* Security tests for credential handling (DSC-004).
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Uses an isolated CCW data directory (CCW_DATA_DIR) to avoid touching real user config.
|
||||
*/
|
||||
|
||||
import { after, afterEach, before, beforeEach, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
import { mkdtempSync, mkdirSync, readFileSync, readdirSync, rmSync, statSync, writeFileSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
const CCW_HOME = mkdtempSync(path.join(tmpdir(), 'ccw-credential-tests-home-'));
|
||||
const PROJECT_ROOT = mkdtempSync(path.join(tmpdir(), 'ccw-credential-tests-project-'));
|
||||
const CONFIG_DIR = path.join(CCW_HOME, 'config');
|
||||
const CONFIG_PATH = path.join(CONFIG_DIR, 'litellm-api-config.json');
|
||||
|
||||
const originalEnv = {
|
||||
CCW_DATA_DIR: process.env.CCW_DATA_DIR,
|
||||
TEST_API_KEY: process.env.TEST_API_KEY,
|
||||
};
|
||||
|
||||
process.env.CCW_DATA_DIR = CCW_HOME;
|
||||
|
||||
const configManagerUrl = new URL('../../dist/config/litellm-api-config-manager.js', import.meta.url);
|
||||
configManagerUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
const litellmRoutesUrl = new URL('../../dist/core/routes/litellm-api-routes.js', import.meta.url);
|
||||
litellmRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let routes: any;
|
||||
|
||||
type JsonResponse = { status: number; json: any; text: string };
|
||||
|
||||
async function requestJson(baseUrl: string, method: string, reqPath: string, body?: unknown): Promise<JsonResponse> {
|
||||
const url = new URL(reqPath, baseUrl);
|
||||
const payload = body === undefined ? null : Buffer.from(JSON.stringify(body), 'utf8');
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = http.request(
|
||||
url,
|
||||
{
|
||||
method,
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
...(payload ? { 'Content-Type': 'application/json', 'Content-Length': String(payload.length) } : {}),
|
||||
},
|
||||
},
|
||||
(res) => {
|
||||
let responseBody = '';
|
||||
res.on('data', (chunk) => {
|
||||
responseBody += chunk.toString();
|
||||
});
|
||||
res.on('end', () => {
|
||||
let json: any = null;
|
||||
try {
|
||||
json = responseBody ? JSON.parse(responseBody) : null;
|
||||
} catch {
|
||||
json = null;
|
||||
}
|
||||
resolve({ status: res.statusCode || 0, json, text: responseBody });
|
||||
});
|
||||
},
|
||||
);
|
||||
req.on('error', reject);
|
||||
if (payload) req.write(payload);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
function handlePostRequest(
|
||||
req: http.IncomingMessage,
|
||||
res: http.ServerResponse,
|
||||
handler: (body: unknown) => Promise<any>,
|
||||
): void {
|
||||
let body = '';
|
||||
req.on('data', (chunk) => {
|
||||
body += chunk.toString();
|
||||
});
|
||||
req.on('end', async () => {
|
||||
try {
|
||||
const parsed = body ? JSON.parse(body) : {};
|
||||
const result = await handler(parsed);
|
||||
|
||||
if (result?.error) {
|
||||
res.writeHead(result.status || 500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: result.error }));
|
||||
} else {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async function createServer(initialPath: string): Promise<{ server: http.Server; baseUrl: string }> {
|
||||
const server = http.createServer(async (req, res) => {
|
||||
const url = new URL(req.url || '/', 'http://localhost');
|
||||
const pathname = url.pathname;
|
||||
|
||||
const ctx = {
|
||||
pathname,
|
||||
url,
|
||||
req,
|
||||
res,
|
||||
initialPath,
|
||||
handlePostRequest,
|
||||
broadcastToClients() {},
|
||||
};
|
||||
|
||||
try {
|
||||
const handled = await routes.handleLiteLLMApiRoutes(ctx);
|
||||
if (!handled) {
|
||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Not Found' }));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
|
||||
await new Promise<void>((resolve) => server.listen(0, () => resolve()));
|
||||
const addr = server.address();
|
||||
const port = typeof addr === 'object' && addr ? addr.port : 0;
|
||||
return { server, baseUrl: `http://127.0.0.1:${port}` };
|
||||
}
|
||||
|
||||
function loadMaskApiKey(): (apiKey: string) => string {
|
||||
const filePath = new URL('../../src/templates/dashboard-js/views/api-settings.js', import.meta.url);
|
||||
const source = readFileSync(filePath, 'utf8');
|
||||
|
||||
const match = source.match(/function\s+maskApiKey\(apiKey\)\s*\{[\s\S]*?\r?\n\}/);
|
||||
if (!match) {
|
||||
throw new Error('maskApiKey function not found in api-settings.js');
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-new-func
|
||||
const fn = new Function(`${match[0]}; return maskApiKey;`) as () => (apiKey: string) => string;
|
||||
return fn();
|
||||
}
|
||||
|
||||
describe('security: credential handling', async () => {
|
||||
const maskApiKey = loadMaskApiKey();
|
||||
|
||||
function listFilesRecursive(dirPath: string): string[] {
|
||||
const results: string[] = [];
|
||||
const entries = readdirSync(dirPath, { withFileTypes: true });
|
||||
for (const entry of entries) {
|
||||
const fullPath = path.join(dirPath, entry.name);
|
||||
if (entry.isDirectory()) results.push(...listFilesRecursive(fullPath));
|
||||
else if (entry.isFile()) results.push(fullPath);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
before(async () => {
|
||||
mod = await import(configManagerUrl.href);
|
||||
routes = await import(litellmRoutesUrl.href);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
process.env.TEST_API_KEY = originalEnv.TEST_API_KEY;
|
||||
rmSync(CONFIG_PATH, { force: true });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mock.restoreAll();
|
||||
});
|
||||
|
||||
after(() => {
|
||||
process.env.CCW_DATA_DIR = originalEnv.CCW_DATA_DIR;
|
||||
process.env.TEST_API_KEY = originalEnv.TEST_API_KEY;
|
||||
rmSync(CCW_HOME, { recursive: true, force: true });
|
||||
rmSync(PROJECT_ROOT, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('resolveEnvVar returns input unchanged when not ${ENV_VAR}', () => {
|
||||
assert.equal(mod.resolveEnvVar('sk-test-1234'), 'sk-test-1234');
|
||||
assert.equal(mod.resolveEnvVar(''), '');
|
||||
});
|
||||
|
||||
it('resolveEnvVar resolves ${ENV_VAR} syntax', () => {
|
||||
process.env.TEST_API_KEY = 'sk-test-resolved';
|
||||
assert.equal(mod.resolveEnvVar('${TEST_API_KEY}'), 'sk-test-resolved');
|
||||
});
|
||||
|
||||
it('resolveEnvVar returns empty string when env var is missing', () => {
|
||||
delete process.env.TEST_API_KEY;
|
||||
assert.equal(mod.resolveEnvVar('${TEST_API_KEY}'), '');
|
||||
});
|
||||
|
||||
it('getProviderWithResolvedEnvVars returns provider with resolvedApiKey', () => {
|
||||
process.env.TEST_API_KEY = 'sk-test-resolved';
|
||||
|
||||
const provider = mod.addProvider(PROJECT_ROOT, {
|
||||
name: 'Test Provider',
|
||||
type: 'openai',
|
||||
apiKey: '${TEST_API_KEY}',
|
||||
apiBase: undefined,
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
const resolved = mod.getProviderWithResolvedEnvVars(PROJECT_ROOT, provider.id);
|
||||
assert.ok(resolved);
|
||||
assert.equal(resolved.id, provider.id);
|
||||
assert.equal(resolved.resolvedApiKey, 'sk-test-resolved');
|
||||
});
|
||||
|
||||
it('resolveEnvVar does not log resolved credential values', () => {
|
||||
const secret = 'sk-test-secret-1234567890';
|
||||
process.env.TEST_API_KEY = secret;
|
||||
|
||||
const calls: string[] = [];
|
||||
mock.method(console, 'log', (...args: unknown[]) => calls.push(args.map(String).join(' ')));
|
||||
mock.method(console, 'error', (...args: unknown[]) => calls.push(args.map(String).join(' ')));
|
||||
|
||||
assert.equal(mod.resolveEnvVar('${TEST_API_KEY}'), secret);
|
||||
assert.equal(calls.some((line) => line.includes(secret)), false);
|
||||
});
|
||||
|
||||
it('getProviderWithResolvedEnvVars does not log resolved credential values', () => {
|
||||
const secret = 'sk-test-secret-abcdef123456';
|
||||
process.env.TEST_API_KEY = secret;
|
||||
|
||||
const calls: string[] = [];
|
||||
mock.method(console, 'log', (...args: unknown[]) => calls.push(args.map(String).join(' ')));
|
||||
mock.method(console, 'error', (...args: unknown[]) => calls.push(args.map(String).join(' ')));
|
||||
|
||||
const provider = mod.addProvider(PROJECT_ROOT, {
|
||||
name: 'Test Provider',
|
||||
type: 'openai',
|
||||
apiKey: '${TEST_API_KEY}',
|
||||
apiBase: undefined,
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
const resolved = mod.getProviderWithResolvedEnvVars(PROJECT_ROOT, provider.id);
|
||||
assert.ok(resolved);
|
||||
assert.equal(resolved.resolvedApiKey, secret);
|
||||
assert.equal(calls.some((line) => line.includes(secret)), false);
|
||||
});
|
||||
|
||||
it('loadLiteLLMApiConfig logs parse errors without leaking credentials', () => {
|
||||
const secret = 'sk-test-secret-in-file-1234';
|
||||
mkdirSync(CONFIG_DIR, { recursive: true });
|
||||
writeFileSync(CONFIG_PATH, `{\"providers\":[{\"apiKey\":\"${secret}\"`, 'utf8');
|
||||
|
||||
const calls: string[] = [];
|
||||
mock.method(console, 'error', (...args: unknown[]) => calls.push(args.map(String).join(' ')));
|
||||
|
||||
const config = mod.loadLiteLLMApiConfig(PROJECT_ROOT);
|
||||
assert.equal(Array.isArray(config.providers), true);
|
||||
assert.equal(config.providers.length, 0);
|
||||
assert.equal(calls.length > 0, true);
|
||||
assert.equal(calls.some((line) => line.includes(secret)), false);
|
||||
});
|
||||
|
||||
it('loadLiteLLMApiConfig stack traces do not include raw credentials', () => {
|
||||
const secret = 'sk-test-secret-stack-9999';
|
||||
mkdirSync(CONFIG_DIR, { recursive: true });
|
||||
writeFileSync(CONFIG_PATH, `{\"providers\":[{\"apiKey\":\"${secret}\"`, 'utf8');
|
||||
|
||||
const errorArgs: unknown[][] = [];
|
||||
mock.method(console, 'error', (...args: unknown[]) => errorArgs.push(args));
|
||||
|
||||
mod.loadLiteLLMApiConfig(PROJECT_ROOT);
|
||||
|
||||
const errorObj = errorArgs.flat().find((arg) => arg instanceof Error) as Error | undefined;
|
||||
assert.ok(errorObj);
|
||||
assert.equal(String(errorObj.stack ?? '').includes(secret), false);
|
||||
});
|
||||
|
||||
it('maskApiKey hides raw keys but keeps env var references readable', () => {
|
||||
assert.equal(maskApiKey(''), '');
|
||||
assert.equal(maskApiKey('${TEST_API_KEY}'), '${TEST_API_KEY}');
|
||||
assert.equal(maskApiKey('short'), '***');
|
||||
assert.equal(maskApiKey('sk-test-1234567890'), 'sk-t...7890');
|
||||
});
|
||||
|
||||
it('getProviderWithResolvedEnvVars is safe to stringify (no env var syntax or resolved secrets)', () => {
|
||||
const secret = 'sk-test-secret-json-0000';
|
||||
process.env.TEST_API_KEY = secret;
|
||||
|
||||
const provider = mod.addProvider(PROJECT_ROOT, {
|
||||
name: 'Test Provider',
|
||||
type: 'openai',
|
||||
apiKey: '${TEST_API_KEY}',
|
||||
apiBase: undefined,
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
const resolved = mod.getProviderWithResolvedEnvVars(PROJECT_ROOT, provider.id);
|
||||
assert.ok(resolved);
|
||||
|
||||
const payload = JSON.stringify(resolved);
|
||||
assert.equal(payload.includes(secret), false);
|
||||
assert.equal(payload.includes('${TEST_API_KEY}'), false);
|
||||
assert.equal(payload.includes('resolvedApiKey'), false);
|
||||
});
|
||||
|
||||
it('API responses do not expose env var syntax for provider apiKey', async () => {
|
||||
process.env.TEST_API_KEY = 'sk-test-secret-api-1111';
|
||||
|
||||
mod.addProvider(PROJECT_ROOT, {
|
||||
name: 'Test Provider',
|
||||
type: 'openai',
|
||||
apiKey: '${TEST_API_KEY}',
|
||||
apiBase: undefined,
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
const { server, baseUrl } = await createServer(PROJECT_ROOT);
|
||||
try {
|
||||
const res = await requestJson(baseUrl, 'GET', '/api/litellm-api/providers');
|
||||
assert.equal(res.status, 200);
|
||||
assert.ok(res.json?.providers);
|
||||
|
||||
assert.equal(res.text.includes('${TEST_API_KEY}'), false);
|
||||
assert.equal(res.text.includes('${'), false);
|
||||
} finally {
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
}
|
||||
});
|
||||
|
||||
it('API responses do not expose resolved secrets in generated rotation endpoints', async () => {
|
||||
const secret = 'sk-test-secret-rotation-2222';
|
||||
process.env.TEST_API_KEY = secret;
|
||||
|
||||
const provider = mod.addProvider(PROJECT_ROOT, {
|
||||
name: 'Embed Provider',
|
||||
type: 'openai',
|
||||
apiKey: '${TEST_API_KEY}',
|
||||
apiBase: undefined,
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
// Ensure provider has an enabled embedding model.
|
||||
mod.updateProvider(PROJECT_ROOT, provider.id, {
|
||||
embeddingModels: [{
|
||||
id: 'emb-1',
|
||||
name: 'text-embedding-test',
|
||||
type: 'embedding',
|
||||
series: 'Test',
|
||||
enabled: true,
|
||||
}],
|
||||
});
|
||||
|
||||
// Configure legacy rotation directly in the config file (avoid auto-sync side effects).
|
||||
mkdirSync(CONFIG_DIR, { recursive: true });
|
||||
const config = mod.loadLiteLLMApiConfig(PROJECT_ROOT);
|
||||
config.codexlensEmbeddingRotation = {
|
||||
enabled: true,
|
||||
strategy: 'round_robin',
|
||||
defaultCooldown: 60,
|
||||
targetModel: 'text-embedding-test',
|
||||
providers: [{
|
||||
providerId: provider.id,
|
||||
modelId: 'emb-1',
|
||||
useAllKeys: true,
|
||||
weight: 1.0,
|
||||
maxConcurrentPerKey: 4,
|
||||
enabled: true,
|
||||
}],
|
||||
};
|
||||
writeFileSync(CONFIG_PATH, JSON.stringify(config, null, 2), 'utf8');
|
||||
|
||||
const { server, baseUrl } = await createServer(PROJECT_ROOT);
|
||||
try {
|
||||
const res = await requestJson(baseUrl, 'GET', '/api/litellm-api/codexlens/rotation/endpoints');
|
||||
assert.equal(res.status, 200);
|
||||
assert.ok(res.json?.endpoints);
|
||||
|
||||
assert.equal(res.text.includes(secret), false);
|
||||
assert.equal(res.text.includes('${TEST_API_KEY}'), false);
|
||||
assert.equal(res.text.includes('${'), false);
|
||||
} finally {
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
}
|
||||
});
|
||||
|
||||
it('stores env var references without persisting resolved secrets when available', () => {
|
||||
const secret = 'sk-test-secret-storage-3333';
|
||||
process.env.TEST_API_KEY = secret;
|
||||
|
||||
mod.addProvider(PROJECT_ROOT, {
|
||||
name: 'Stored Provider',
|
||||
type: 'openai',
|
||||
apiKey: '${TEST_API_KEY}',
|
||||
apiBase: undefined,
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
const content = readFileSync(CONFIG_PATH, 'utf8');
|
||||
assert.equal(content.includes('${TEST_API_KEY}'), true);
|
||||
assert.equal(content.includes(secret), false);
|
||||
});
|
||||
|
||||
it('does not write resolved secrets into ancillary files under CCW_DATA_DIR', () => {
|
||||
const secret = 'sk-test-secret-storage-scan-4444';
|
||||
process.env.TEST_API_KEY = secret;
|
||||
|
||||
mod.addProvider(PROJECT_ROOT, {
|
||||
name: 'Stored Provider',
|
||||
type: 'openai',
|
||||
apiKey: '${TEST_API_KEY}',
|
||||
apiBase: undefined,
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
const files = listFilesRecursive(CCW_HOME);
|
||||
assert.ok(files.length > 0);
|
||||
|
||||
for (const filePath of files) {
|
||||
const content = readFileSync(filePath, 'utf8');
|
||||
assert.equal(content.includes(secret), false);
|
||||
}
|
||||
});
|
||||
|
||||
it('writes config file with restrictive permissions where supported', () => {
|
||||
mod.addProvider(PROJECT_ROOT, {
|
||||
name: 'Perms Provider',
|
||||
type: 'openai',
|
||||
apiKey: 'sk-test-raw-key',
|
||||
apiBase: undefined,
|
||||
enabled: true,
|
||||
});
|
||||
|
||||
const stat = statSync(CONFIG_PATH);
|
||||
assert.equal(stat.isFile(), true);
|
||||
|
||||
if (process.platform === 'win32') return;
|
||||
|
||||
// Require no permissions for group/others (0600).
|
||||
const mode = stat.mode & 0o777;
|
||||
assert.equal(mode & 0o077, 0);
|
||||
});
|
||||
});
|
||||
294
ccw/tests/security/csrf.test.ts
Normal file
294
ccw/tests/security/csrf.test.ts
Normal file
@@ -0,0 +1,294 @@
|
||||
/**
|
||||
* Security regression tests for CSRF protection (DSC-006).
|
||||
*
|
||||
* Verifies:
|
||||
* - State-changing API routes require a valid CSRF token (cookie/header/body)
|
||||
* - Tokens are single-use and session-bound
|
||||
* - CORS rejects non-localhost origins (browser-enforced via mismatched Allow-Origin)
|
||||
* - Development bypass flag disables CSRF validation
|
||||
*/
|
||||
|
||||
import { after, before, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
type HttpResult = {
|
||||
status: number;
|
||||
body: string;
|
||||
headers: http.IncomingHttpHeaders;
|
||||
};
|
||||
|
||||
function httpRequest(options: http.RequestOptions, body?: string, timeout = 10000): Promise<HttpResult> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = http.request(options, (res) => {
|
||||
let data = '';
|
||||
res.on('data', chunk => data += chunk);
|
||||
res.on('end', () => resolve({ status: res.statusCode || 0, body: data, headers: res.headers }));
|
||||
});
|
||||
req.on('error', reject);
|
||||
req.setTimeout(timeout, () => {
|
||||
req.destroy();
|
||||
reject(new Error('Request timeout'));
|
||||
});
|
||||
if (body) req.write(body);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
function updateCookieJar(jar: Record<string, string>, setCookie: string | string[] | undefined): void {
|
||||
if (!setCookie) return;
|
||||
const cookies = Array.isArray(setCookie) ? setCookie : [setCookie];
|
||||
for (const cookie of cookies) {
|
||||
const pair = cookie.split(';')[0]?.trim();
|
||||
if (!pair) continue;
|
||||
const [name, ...valueParts] = pair.split('=');
|
||||
jar[name] = valueParts.join('=');
|
||||
}
|
||||
}
|
||||
|
||||
function cookieHeader(jar: Record<string, string>): string {
|
||||
return Object.entries(jar)
|
||||
.map(([name, value]) => `${name}=${value}`)
|
||||
.join('; ');
|
||||
}
|
||||
|
||||
function cloneJar(jar: Record<string, string>): Record<string, string> {
|
||||
return { ...jar };
|
||||
}
|
||||
|
||||
async function getDashboardSession(port: number): Promise<{ jar: Record<string, string>; csrfHeader: string | null }> {
|
||||
const jar: Record<string, string> = {};
|
||||
const res = await httpRequest({ hostname: '127.0.0.1', port, path: '/', method: 'GET' });
|
||||
updateCookieJar(jar, res.headers['set-cookie']);
|
||||
return { jar, csrfHeader: typeof res.headers['x-csrf-token'] === 'string' ? res.headers['x-csrf-token'] : null };
|
||||
}
|
||||
|
||||
async function postNotify(port: number, jar: Record<string, string>, extraHeaders?: Record<string, string>, body?: unknown): Promise<HttpResult> {
|
||||
const payload = body === undefined ? { type: 'REFRESH_REQUIRED', scope: 'all' } : body;
|
||||
const encoded = JSON.stringify(payload);
|
||||
return httpRequest(
|
||||
{
|
||||
hostname: '127.0.0.1',
|
||||
port,
|
||||
path: '/api/system/notify',
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(Object.keys(jar).length ? { Cookie: cookieHeader(jar) } : {}),
|
||||
...(extraHeaders ?? {}),
|
||||
},
|
||||
},
|
||||
encoded,
|
||||
);
|
||||
}
|
||||
|
||||
// Full environment snapshot, restored verbatim in the suite's after() hook.
const ORIGINAL_ENV = { ...process.env };
// Built dashboard server entry point and CSRF manager from dist, resolved
// relative to this test file.
const serverUrl = new URL('../../dist/core/server.js', import.meta.url).href;
const csrfManagerUrl = new URL('../../dist/core/auth/csrf-manager.js', import.meta.url).href;
// CSRF regression suite: starts one real dashboard server on an ephemeral
// port in before() and exercises /api/system/notify through it. The server,
// env snapshot, and temp dirs are torn down in after().
describe('security: CSRF protection', async () => {
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  let serverMod: any;
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
  let csrfMod: any;

  let server: http.Server;
  let port: number;
  let projectRoot: string;
  let ccwHome: string;

  before(async () => {
    projectRoot = mkdtempSync(join(tmpdir(), 'ccw-csrf-project-'));
    ccwHome = mkdtempSync(join(tmpdir(), 'ccw-csrf-home-'));

    // Redirect CCW state to a scratch dir before importing the dist modules.
    process.env = { ...ORIGINAL_ENV, CCW_DATA_DIR: ccwHome };

    serverMod = await import(serverUrl);
    csrfMod = await import(csrfManagerUrl);

    // Silence server logging for the duration of the suite.
    mock.method(console, 'log', () => {});
    mock.method(console, 'error', () => {});

    server = await serverMod.startServer({ initialPath: projectRoot, port: 0 });
    const addr = server.address();
    port = typeof addr === 'object' && addr ? addr.port : 0;
    assert.ok(port > 0, 'Server should start on a valid port');
  });

  after(async () => {
    await new Promise<void>((resolve) => server.close(() => resolve()));
    mock.restoreAll();
    process.env = ORIGINAL_ENV;
    rmSync(projectRoot, { recursive: true, force: true });
    rmSync(ccwHome, { recursive: true, force: true });
  });

  it('blocks POST requests without CSRF token', async () => {
    const { jar } = await getDashboardSession(port);
    delete jar['XSRF-TOKEN'];

    const res = await postNotify(port, jar);
    assert.equal(res.status, 403);
    assert.ok(res.body.includes('CSRF validation failed'));
  });

  it('blocks POST requests with forged CSRF token', async () => {
    const { jar } = await getDashboardSession(port);
    jar['XSRF-TOKEN'] = 'forged-token';

    const res = await postNotify(port, jar);
    assert.equal(res.status, 403);
  });

  it('blocks expired CSRF tokens', async () => {
    // Reconfigure the token manager with a 1ms TTL so the session token is
    // already expired by the time the POST is attempted.
    csrfMod.resetCsrfTokenManager();
    csrfMod.getCsrfTokenManager({ tokenTtlMs: 1, cleanupIntervalMs: 0 });

    const { jar } = await getDashboardSession(port);
    await new Promise(resolve => setTimeout(resolve, 10));

    const res = await postNotify(port, jar);
    assert.equal(res.status, 403);

    // Restore default manager settings for the remaining tests.
    csrfMod.resetCsrfTokenManager();
  });

  it('blocks token reuse (single-use tokens)', async () => {
    const { jar } = await getDashboardSession(port);
    const oldToken = jar['XSRF-TOKEN'];

    const first = await postNotify(port, jar);
    assert.equal(first.status, 200);
    updateCookieJar(jar, first.headers['set-cookie']);

    // Try again using the old token explicitly (should fail).
    const reuseJar = cloneJar(jar);
    reuseJar['XSRF-TOKEN'] = oldToken;
    const secondUse = await postNotify(port, reuseJar);
    assert.equal(secondUse.status, 403);
  });

  it('blocks CSRF token theft across sessions', async () => {
    const sessionA = await getDashboardSession(port);
    const sessionB = await getDashboardSession(port);

    // Session B presents session A's token: must be rejected (tokens are
    // session-bound).
    const jar = cloneJar(sessionB.jar);
    jar['XSRF-TOKEN'] = sessionA.jar['XSRF-TOKEN'];

    const res = await postNotify(port, jar);
    assert.equal(res.status, 403);
  });

  it('does not require CSRF on GET requests', async () => {
    const { jar } = await getDashboardSession(port);
    const res = await httpRequest({
      hostname: '127.0.0.1',
      port,
      path: '/api/health',
      method: 'GET',
      headers: { Cookie: cookieHeader(jar) },
    });
    assert.equal(res.status, 200);
  });

  it('accepts CSRF token provided via cookie (legitimate flow)', async () => {
    const { jar } = await getDashboardSession(port);
    const res = await postNotify(port, jar);
    assert.equal(res.status, 200);
  });

  it('accepts CSRF token provided via header', async () => {
    const { jar } = await getDashboardSession(port);
    const token = jar['XSRF-TOKEN'];
    delete jar['XSRF-TOKEN'];

    const res = await postNotify(port, jar, { 'X-CSRF-Token': token });
    assert.equal(res.status, 200);
  });

  it('accepts CSRF token provided via request body', async () => {
    const { jar } = await getDashboardSession(port);
    const token = jar['XSRF-TOKEN'];
    delete jar['XSRF-TOKEN'];

    const res = await postNotify(port, jar, undefined, { type: 'REFRESH_REQUIRED', scope: 'all', csrfToken: token });
    assert.equal(res.status, 200);
  });

  it('rotates CSRF token after successful POST', async () => {
    const { jar } = await getDashboardSession(port);
    const firstToken = jar['XSRF-TOKEN'];

    const res = await postNotify(port, jar);
    assert.equal(res.status, 200);
    updateCookieJar(jar, res.headers['set-cookie']);

    assert.notEqual(jar['XSRF-TOKEN'], firstToken);
  });

  it('allows localhost origins and rejects external origins (CORS)', async () => {
    const allowedOrigin = `http://localhost:${port}`;
    const allowed = await httpRequest({
      hostname: '127.0.0.1',
      port,
      path: '/api/health',
      method: 'GET',
      headers: { Origin: allowedOrigin },
    });
    assert.equal(allowed.headers['access-control-allow-origin'], allowedOrigin);
    assert.equal(allowed.headers['vary'], 'Origin');

    // A non-localhost Origin must never be echoed back; the server responds
    // with its own localhost origin instead (browser will block the read).
    const evilOrigin = 'http://evil.com';
    const denied = await httpRequest({
      hostname: '127.0.0.1',
      port,
      path: '/api/health',
      method: 'GET',
      headers: { Origin: evilOrigin },
    });
    assert.notEqual(denied.headers['access-control-allow-origin'], evilOrigin);
    assert.equal(denied.headers['access-control-allow-origin'], `http://localhost:${port}`);
  });

  it('bypasses CSRF validation when CCW_DISABLE_CSRF=true', async () => {
    process.env.CCW_DISABLE_CSRF = 'true';
    const { jar } = await getDashboardSession(port);
    delete jar['XSRF-TOKEN'];

    const res = await postNotify(port, jar);
    assert.equal(res.status, 200);

    delete process.env.CCW_DISABLE_CSRF;
  });

  it('skips CSRF validation for Authorization header auth', async () => {
    // /api/auth/token is loopback-only and returns the current JWT.
    const tokenRes = await httpRequest({
      hostname: '127.0.0.1',
      port,
      path: '/api/auth/token',
      method: 'GET',
    });

    const parsed = JSON.parse(tokenRes.body) as { token: string };
    assert.ok(parsed.token);

    const res = await httpRequest(
      {
        hostname: '127.0.0.1',
        port,
        path: '/api/system/notify',
        method: 'POST',
        headers: {
          Authorization: `Bearer ${parsed.token}`,
          'Content-Type': 'application/json',
        },
      },
      JSON.stringify({ type: 'REFRESH_REQUIRED', scope: 'all' }),
    );

    assert.equal(res.status, 200);
  });
});
225
ccw/tests/security/path-traversal.test.ts
Normal file
225
ccw/tests/security/path-traversal.test.ts
Normal file
@@ -0,0 +1,225 @@
|
||||
/**
|
||||
* Regression tests for path traversal protections (DSC-005).
|
||||
*
|
||||
* Focus:
|
||||
* - Allowlist enforcement + boundary checks (no "/allowedness" bypass)
|
||||
* - Symlink target re-validation via realpath
|
||||
* - Non-existent path handling via parent-directory validation
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Uses stubbed fs + fs/promises to avoid touching real filesystem.
|
||||
*/
|
||||
|
||||
import { after, before, beforeEach, describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import path from 'node:path';
|
||||
import { createRequire } from 'node:module';
|
||||
|
||||
const require = createRequire(import.meta.url);
// Load fs/fs-promises/os through require so the module instances can be
// monkey-patched below; ESM namespace imports are read-only.
// eslint-disable-next-line @typescript-eslint/no-var-requires
const fsp = require('node:fs/promises') as typeof import('node:fs/promises');
// eslint-disable-next-line @typescript-eslint/no-var-requires
const fs = require('node:fs') as typeof import('node:fs');
// eslint-disable-next-line @typescript-eslint/no-var-requires
const os = require('node:os') as typeof import('node:os');

// Cache-busted URLs for the dist modules under test so stale instances are
// never reused.
const pathValidatorUrl = new URL('../../dist/utils/path-validator.js', import.meta.url);
pathValidatorUrl.searchParams.set('t', String(Date.now()));

const pathResolverUrl = new URL('../../dist/utils/path-resolver.js', import.meta.url);
pathResolverUrl.searchParams.set('t', String(Date.now()));
const ORIGINAL_ENV = { ...process.env };
|
||||
|
||||
function resetEnv(): void {
|
||||
for (const key of Object.keys(process.env)) {
|
||||
if (!(key in ORIGINAL_ENV)) delete process.env[key];
|
||||
}
|
||||
for (const [key, value] of Object.entries(ORIGINAL_ENV)) {
|
||||
process.env[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
function enoent(message: string): Error & { code: string } {
|
||||
const err = new Error(message) as Error & { code: string };
|
||||
err.code = 'ENOENT';
|
||||
return err;
|
||||
}
|
||||
|
||||
// Scripted outcomes for fsp.realpath keyed by the exact path argument: a
// planned return value or a planned throw. Paths with no plan raise ENOENT.
type RealpathPlan = Map<string, { type: 'return'; value: string } | { type: 'throw'; error: any }>;
const realpathPlan: RealpathPlan = new Map();
// Every path the code under test asked realpath about (for ordering asserts).
const realpathCalls: string[] = [];

// Monkey-patch fsp.realpath; the original is retained so it could be
// restored, mirroring the other stubs in this file.
const originalRealpath = fsp.realpath;
fsp.realpath = (async (p: string) => {
  realpathCalls.push(p);
  const planned = realpathPlan.get(p);
  if (!planned) {
    throw enoent(`ENOENT: no such file or directory, realpath '${p}'`);
  }
  if (planned.type === 'throw') throw planned.error;
  return planned.value;
}) as any;
// Stubbed filesystem view consulted by the fs.existsSync / fs.realpathSync
// overrides below. Keys are normalized via key().
type FsState = {
  existing: Set<string>;
  realpaths: Map<string, string>;
};

const fsState: FsState = {
  existing: new Set(),
  realpaths: new Map(),
};
function key(filePath: string): string {
|
||||
return path.resolve(filePath).replace(/\\/g, '/').toLowerCase();
|
||||
}
|
||||
|
||||
function setExists(filePath: string, exists: boolean): void {
|
||||
const normalized = key(filePath);
|
||||
if (exists) fsState.existing.add(normalized);
|
||||
else fsState.existing.delete(normalized);
|
||||
}
|
||||
|
||||
function setRealpath(filePath: string, realPath: string): void {
|
||||
fsState.realpaths.set(key(filePath), realPath);
|
||||
}
|
||||
|
||||
// Keep the genuine implementations so they could be restored after the
// suite (matches the realpath/homedir stubs above and below).
const originalFs = {
  existsSync: fs.existsSync,
  realpathSync: fs.realpathSync,
};

// existsSync answers from the stub table; realpathSync returns the mapped
// real path or the input unchanged (i.e. "not a symlink").
fs.existsSync = ((filePath: string) => fsState.existing.has(key(filePath))) as any;
fs.realpathSync = ((filePath: string) => {
  const mapped = fsState.realpaths.get(key(filePath));
  return mapped ?? filePath;
}) as any;
// Redirect os.homedir() to a deterministic scratch location so allowlist
// resolution never depends on the real user home; original kept for restore.
const originalHomedir = os.homedir;
const TEST_HOME = path.join(process.cwd(), '.tmp-ccw-security-home');
os.homedir = () => TEST_HOME;

// Modules under test, loaded in before() after the stubs are installed.
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let pathValidator: any;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let pathResolver: any;
describe('security: path traversal regression', async () => {
|
||||
const isWindows = process.platform === 'win32';
|
||||
const allowedRoot = isWindows ? 'C:\\allowed' : '/allowed';
|
||||
const disallowedRoot = isWindows ? 'C:\\secret' : '/secret';
|
||||
|
||||
before(async () => {
|
||||
pathValidator = await import(pathValidatorUrl.href);
|
||||
pathResolver = await import(pathResolverUrl.href);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
realpathCalls.length = 0;
|
||||
realpathPlan.clear();
|
||||
fsState.existing.clear();
|
||||
fsState.realpaths.clear();
|
||||
resetEnv();
|
||||
});
|
||||
|
||||
it('path-validator rejects traversal/absolute escapes before realpath', async () => {
|
||||
process.env.CCW_PROJECT_ROOT = allowedRoot;
|
||||
const allowedDirectories = [allowedRoot];
|
||||
|
||||
const vectors: Array<{ name: string; input: string }> = [
|
||||
{ name: 'absolute outside allowlist', input: path.join(disallowedRoot, 'secret.txt') },
|
||||
{ name: 'allowed prefix but different dir (allowedness)', input: `${allowedRoot}ness${isWindows ? '\\\\' : '/'}file.txt` },
|
||||
{ name: 'allowed prefix but different dir (allowed-evil)', input: `${allowedRoot}-evil${isWindows ? '\\\\' : '/'}file.txt` },
|
||||
{ name: 'absolute contains .. segment escaping allowlist', input: `${allowedRoot}${isWindows ? '\\\\' : '/'}..${isWindows ? '\\\\' : '/'}secret.txt` },
|
||||
{ name: 'absolute multi-.. escaping allowlist', input: `${allowedRoot}${isWindows ? '\\\\' : '/'}sub${isWindows ? '\\\\' : '/'}..${isWindows ? '\\\\' : '/'}..${isWindows ? '\\\\' : '/'}secret.txt` },
|
||||
{ name: 'relative traversal one level', input: `..${isWindows ? '\\\\' : '/'}secret.txt` },
|
||||
{ name: 'relative traversal two levels', input: `..${isWindows ? '\\\\' : '/'}..${isWindows ? '\\\\' : '/'}secret.txt` },
|
||||
{ name: 'mixed separators traversal', input: `sub${isWindows ? '/' : '/'}..${isWindows ? '\\\\' : '/'}..${isWindows ? '\\\\' : '/'}secret.txt` },
|
||||
{ name: 'posix absolute escape', input: '/etc/passwd' },
|
||||
{ name: 'encoded traversal (decoded once)', input: decodeURIComponent('%2e%2e%2f%2e%2e%2fetc%2fpasswd') },
|
||||
{ name: 'double-encoded traversal (decoded twice)', input: decodeURIComponent(decodeURIComponent('%252e%252e%252f%252e%252e%252fetc%252fpasswd')) },
|
||||
{ name: 'leading dot traversal', input: `.${isWindows ? '\\\\' : '/'}..${isWindows ? '\\\\' : '/'}secret.txt` },
|
||||
{ name: 'nested traversal escape', input: 'sub/../../secret.txt' },
|
||||
{ name: 'alt-drive absolute escape', input: isWindows ? 'D:\\\\secret\\\\file.txt' : '/var/secret/file.txt' },
|
||||
{ name: 'UNC/extended path escape', input: isWindows ? '\\\\\\\\?\\\\C:\\\\secret\\\\file.txt' : '/private/secret/file.txt' },
|
||||
];
|
||||
|
||||
for (const vector of vectors) {
|
||||
await assert.rejects(
|
||||
pathValidator.validatePath(vector.input, { allowedDirectories }),
|
||||
(err: any) => err instanceof Error && err.message.includes('Access denied: path'),
|
||||
vector.name,
|
||||
);
|
||||
}
|
||||
|
||||
assert.deepEqual(realpathCalls, []);
|
||||
});
|
||||
|
||||
it('path-validator enforces directory-boundary allowlists', async () => {
|
||||
process.env.CCW_PROJECT_ROOT = allowedRoot;
|
||||
const allowedDirectories = [path.join(allowedRoot, 'dir')];
|
||||
|
||||
await assert.rejects(
|
||||
pathValidator.validatePath(path.join(allowedRoot, 'dir-malicious', 'file.txt'), { allowedDirectories }),
|
||||
(err: any) => err instanceof Error && err.message.includes('Access denied: path'),
|
||||
);
|
||||
|
||||
const okPath = path.join(allowedRoot, 'dir', 'file.txt');
|
||||
const resolvedOk = await pathValidator.validatePath(okPath, { allowedDirectories });
|
||||
assert.equal(pathValidator.isPathWithinAllowedDirectories(resolvedOk, allowedDirectories), true);
|
||||
});
|
||||
|
||||
it('path-validator rejects symlink targets outside allowlist', async () => {
|
||||
const linkPath = path.join(allowedRoot, 'link.txt');
|
||||
realpathPlan.set(linkPath, { type: 'return', value: path.join(disallowedRoot, 'target.txt') });
|
||||
|
||||
await assert.rejects(
|
||||
pathValidator.validatePath(linkPath, { allowedDirectories: [allowedRoot] }),
|
||||
(err: any) => err instanceof Error && err.message.includes('symlink target'),
|
||||
);
|
||||
});
|
||||
|
||||
it('path-validator rejects non-existent paths when the parent resolves outside allowlist', async () => {
|
||||
const linkDir = path.join(allowedRoot, 'linkdir');
|
||||
const newFile = path.join(linkDir, 'newfile.txt');
|
||||
|
||||
realpathPlan.set(newFile, { type: 'throw', error: enoent('missing') });
|
||||
realpathPlan.set(linkDir, { type: 'return', value: disallowedRoot });
|
||||
|
||||
await assert.rejects(
|
||||
pathValidator.validatePath(newFile, { allowedDirectories: [allowedRoot] }),
|
||||
(err: any) => err instanceof Error && err.message.includes('parent directory'),
|
||||
);
|
||||
});
|
||||
|
||||
it('path-resolver validates baseDir before and after symlink resolution', () => {
|
||||
const baseDir = allowedRoot;
|
||||
setExists(baseDir, true);
|
||||
|
||||
const traversal = pathResolver.validatePath(`${baseDir}${isWindows ? '\\\\' : '/'}..${isWindows ? '\\\\' : '/'}secret`, { baseDir });
|
||||
assert.equal(traversal.valid, false);
|
||||
assert.ok(traversal.error?.includes('Path must be within'));
|
||||
|
||||
const linkPath = path.join(baseDir, 'link');
|
||||
setExists(linkPath, true);
|
||||
setRealpath(linkPath, disallowedRoot);
|
||||
const symlinkEscape = pathResolver.validatePath(linkPath, { baseDir });
|
||||
assert.equal(symlinkEscape.valid, false);
|
||||
assert.ok(symlinkEscape.error?.includes('Path must be within'));
|
||||
|
||||
setExists(linkPath, true);
|
||||
const symlinkParentEscape = pathResolver.validatePath(path.join(linkPath, 'newfile.txt'), { baseDir });
|
||||
assert.equal(symlinkParentEscape.valid, false);
|
||||
assert.ok(symlinkParentEscape.error?.includes('Path must be within'));
|
||||
});
|
||||
});
|
||||
|
||||
after(() => {
|
||||
fsp.realpath = originalRealpath;
|
||||
fs.existsSync = originalFs.existsSync;
|
||||
fs.realpathSync = originalFs.realpathSync;
|
||||
os.homedir = originalHomedir;
|
||||
resetEnv();
|
||||
});
|
||||
151
ccw/tests/server-auth.integration.test.ts
Normal file
151
ccw/tests/server-auth.integration.test.ts
Normal file
@@ -0,0 +1,151 @@
|
||||
/**
|
||||
* Integration tests for server authentication flow.
|
||||
*
|
||||
* Verifies:
|
||||
* - API routes require auth token
|
||||
* - /api/auth/token returns token + cookie for localhost requests
|
||||
* - Authorization header and cookie auth both work
|
||||
*/
|
||||
|
||||
import { after, before, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
type HttpResult = {
|
||||
status: number;
|
||||
body: string;
|
||||
headers: http.IncomingHttpHeaders;
|
||||
};
|
||||
|
||||
function httpRequest(options: http.RequestOptions, body?: string, timeout = 10000): Promise<HttpResult> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = http.request(options, (res) => {
|
||||
let data = '';
|
||||
res.on('data', chunk => data += chunk);
|
||||
res.on('end', () => resolve({ status: res.statusCode || 0, body: data, headers: res.headers }));
|
||||
});
|
||||
req.on('error', reject);
|
||||
req.setTimeout(timeout, () => {
|
||||
req.destroy();
|
||||
reject(new Error('Request timeout'));
|
||||
});
|
||||
if (body) req.write(body);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
const ORIGINAL_ENV = { ...process.env };
|
||||
const serverUrl = new URL('../dist/core/server.js', import.meta.url);
|
||||
serverUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
describe('server authentication integration', async () => {
|
||||
let server: http.Server;
|
||||
let port: number;
|
||||
let projectRoot: string;
|
||||
let ccwHome: string;
|
||||
|
||||
before(async () => {
|
||||
projectRoot = mkdtempSync(join(tmpdir(), 'ccw-auth-project-'));
|
||||
ccwHome = mkdtempSync(join(tmpdir(), 'ccw-auth-home-'));
|
||||
|
||||
process.env = { ...ORIGINAL_ENV, CCW_DATA_DIR: ccwHome };
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const serverMod: any = await import(serverUrl.href);
|
||||
|
||||
mock.method(console, 'log', () => {});
|
||||
mock.method(console, 'error', () => {});
|
||||
|
||||
server = await serverMod.startServer({ initialPath: projectRoot, port: 0 });
|
||||
const addr = server.address();
|
||||
port = typeof addr === 'object' && addr ? addr.port : 0;
|
||||
assert.ok(port > 0, 'Server should start on a valid port');
|
||||
});
|
||||
|
||||
after(async () => {
|
||||
await new Promise<void>((resolve) => {
|
||||
server.close(() => resolve());
|
||||
});
|
||||
mock.restoreAll();
|
||||
process.env = ORIGINAL_ENV;
|
||||
rmSync(projectRoot, { recursive: true, force: true });
|
||||
rmSync(ccwHome, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('rejects unauthenticated API requests with 401', async () => {
|
||||
const response = await httpRequest({
|
||||
hostname: '127.0.0.1',
|
||||
port,
|
||||
path: '/api/health',
|
||||
method: 'GET',
|
||||
});
|
||||
|
||||
assert.equal(response.status, 401);
|
||||
assert.ok(response.body.includes('Unauthorized'));
|
||||
});
|
||||
|
||||
it('returns auth token and cookie for localhost requests', async () => {
|
||||
const response = await httpRequest({
|
||||
hostname: '127.0.0.1',
|
||||
port,
|
||||
path: '/api/auth/token',
|
||||
method: 'GET',
|
||||
});
|
||||
|
||||
assert.equal(response.status, 200);
|
||||
const data = JSON.parse(response.body) as { token: string; expiresAt: string };
|
||||
assert.ok(data.token);
|
||||
assert.ok(data.expiresAt);
|
||||
|
||||
const setCookie = response.headers['set-cookie'];
|
||||
assert.ok(setCookie && setCookie.length > 0, 'Expected Set-Cookie header');
|
||||
});
|
||||
|
||||
it('accepts Authorization header on API routes', async () => {
|
||||
const tokenResponse = await httpRequest({
|
||||
hostname: '127.0.0.1',
|
||||
port,
|
||||
path: '/api/auth/token',
|
||||
method: 'GET',
|
||||
});
|
||||
|
||||
const { token } = JSON.parse(tokenResponse.body) as { token: string };
|
||||
const response = await httpRequest({
|
||||
hostname: '127.0.0.1',
|
||||
port,
|
||||
path: '/api/health',
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Authorization: `Bearer ${token}`,
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(response.status, 200);
|
||||
});
|
||||
|
||||
it('accepts cookie auth on API routes', async () => {
|
||||
const tokenResponse = await httpRequest({
|
||||
hostname: '127.0.0.1',
|
||||
port,
|
||||
path: '/api/auth/token',
|
||||
method: 'GET',
|
||||
});
|
||||
|
||||
const { token } = JSON.parse(tokenResponse.body) as { token: string };
|
||||
const response = await httpRequest({
|
||||
hostname: '127.0.0.1',
|
||||
port,
|
||||
path: '/api/health',
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Cookie: `auth_token=${encodeURIComponent(token)}`,
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(response.status, 200);
|
||||
});
|
||||
});
|
||||
|
||||
98
ccw/tests/server.test.ts
Normal file
98
ccw/tests/server.test.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
/**
|
||||
* Unit tests for server binding defaults and host option plumbing.
|
||||
*/
|
||||
|
||||
import { afterEach, before, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const ORIGINAL_ENV = { ...process.env };
|
||||
|
||||
const serverUrl = new URL('../dist/core/server.js', import.meta.url);
|
||||
serverUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
const serveUrl = new URL('../dist/commands/serve.js', import.meta.url);
|
||||
serveUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
describe('server binding', async () => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let serverMod: any;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let serveMod: any;
|
||||
|
||||
before(async () => {
|
||||
serverMod = await import(serverUrl.href);
|
||||
serveMod = await import(serveUrl.href);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mock.restoreAll();
|
||||
process.env = ORIGINAL_ENV;
|
||||
});
|
||||
|
||||
it('binds to 127.0.0.1 by default', async () => {
|
||||
const ccwHome = mkdtempSync(join(tmpdir(), 'ccw-server-bind-home-'));
|
||||
process.env = { ...ORIGINAL_ENV, CCW_DATA_DIR: ccwHome };
|
||||
|
||||
const listenCalls: any[] = [];
|
||||
const originalListen = http.Server.prototype.listen;
|
||||
|
||||
mock.method(http.Server.prototype as any, 'listen', function (this: any, ...args: any[]) {
|
||||
listenCalls.push(args);
|
||||
return (originalListen as any).apply(this, args);
|
||||
});
|
||||
|
||||
const server: http.Server = await serverMod.startServer({ initialPath: process.cwd(), port: 0 });
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
|
||||
rmSync(ccwHome, { recursive: true, force: true });
|
||||
|
||||
assert.ok(listenCalls.length > 0, 'Expected server.listen to be called');
|
||||
assert.equal(listenCalls[0][1], '127.0.0.1');
|
||||
});
|
||||
|
||||
it('passes host option through serve command', async () => {
|
||||
const ccwHome = mkdtempSync(join(tmpdir(), 'ccw-serve-bind-home-'));
|
||||
process.env = { ...ORIGINAL_ENV, CCW_DATA_DIR: ccwHome };
|
||||
|
||||
mock.method(console, 'log', () => {});
|
||||
mock.method(console, 'error', () => {});
|
||||
|
||||
let sigintHandler: (() => void) | null = null;
|
||||
const originalOn = process.on.bind(process);
|
||||
mock.method(process as any, 'on', (event: string, handler: any) => {
|
||||
if (event === 'SIGINT') {
|
||||
sigintHandler = handler;
|
||||
return process;
|
||||
}
|
||||
return originalOn(event, handler);
|
||||
});
|
||||
|
||||
const exitCodes: Array<number | undefined> = [];
|
||||
mock.method(process as any, 'exit', (code?: number) => {
|
||||
exitCodes.push(code);
|
||||
});
|
||||
|
||||
const listenCalls: any[] = [];
|
||||
const originalListen = http.Server.prototype.listen;
|
||||
mock.method(http.Server.prototype as any, 'listen', function (this: any, ...args: any[]) {
|
||||
listenCalls.push(args);
|
||||
return (originalListen as any).apply(this, args);
|
||||
});
|
||||
|
||||
await serveMod.serveCommand({ port: 0, browser: false, path: process.cwd(), host: '0.0.0.0' });
|
||||
assert.ok(sigintHandler, 'Expected serveCommand to register SIGINT handler');
|
||||
|
||||
sigintHandler?.();
|
||||
await new Promise((resolve) => setTimeout(resolve, 300));
|
||||
|
||||
rmSync(ccwHome, { recursive: true, force: true });
|
||||
|
||||
assert.ok(exitCodes.includes(0));
|
||||
assert.ok(listenCalls.some((args) => args[1] === '0.0.0.0'));
|
||||
});
|
||||
});
|
||||
|
||||
47
ccw/tests/shell-escape.test.ts
Normal file
47
ccw/tests/shell-escape.test.ts
Normal file
@@ -0,0 +1,47 @@
|
||||
/**
|
||||
* Unit tests for Windows cmd.exe argument escaping (ccw/dist/utils/shell-escape.js)
|
||||
*/
|
||||
|
||||
import { describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
const shellEscapeUrl = new URL('../dist/utils/shell-escape.js', import.meta.url).href;
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
describe('escapeWindowsArg', async () => {
|
||||
mod = await import(shellEscapeUrl);
|
||||
|
||||
it('escapes cmd.exe metacharacters with caret', () => {
|
||||
const cases: Array<{ input: string; expected: string }> = [
|
||||
{ input: 'arg|command', expected: 'arg^|command' },
|
||||
{ input: 'arg&command', expected: 'arg^&command' },
|
||||
{ input: 'arg&&command', expected: 'arg^&^&command' },
|
||||
{ input: 'arg||command', expected: 'arg^|^|command' },
|
||||
{ input: 'arg>out.txt', expected: 'arg^>out.txt' },
|
||||
{ input: 'arg>>out.txt', expected: 'arg^>^>out.txt' },
|
||||
{ input: 'arg<input.txt', expected: 'arg^<input.txt' },
|
||||
{ input: '(test)', expected: '^(test^)' },
|
||||
{ input: '%PATH%', expected: '^%PATH^%' },
|
||||
{ input: '!VAR!', expected: '^!VAR^!' },
|
||||
{ input: 'arg"cmd', expected: 'arg^"cmd' },
|
||||
{ input: 'a^b', expected: 'a^^b' },
|
||||
];
|
||||
|
||||
for (const { input, expected } of cases) {
|
||||
assert.equal(mod.escapeWindowsArg(input), expected, `escapeWindowsArg(${JSON.stringify(input)})`);
|
||||
}
|
||||
});
|
||||
|
||||
it('wraps whitespace-containing args in double quotes', () => {
|
||||
assert.equal(mod.escapeWindowsArg('hello world'), '"hello world"');
|
||||
assert.equal(mod.escapeWindowsArg('test & echo'), '"test ^& echo"');
|
||||
assert.equal(mod.escapeWindowsArg('a|b c'), '"a^|b c"');
|
||||
});
|
||||
|
||||
it('handles empty arguments', () => {
|
||||
assert.equal(mod.escapeWindowsArg(''), '""');
|
||||
});
|
||||
});
|
||||
|
||||
179
ccw/tests/skills-routes.test.ts
Normal file
179
ccw/tests/skills-routes.test.ts
Normal file
@@ -0,0 +1,179 @@
|
||||
/**
|
||||
* Integration tests for skills routes path validation.
|
||||
*
|
||||
* Notes:
|
||||
* - Targets runtime implementation shipped in `ccw/dist`.
|
||||
* - Focuses on access control for projectPath and traversal attempts.
|
||||
*/
|
||||
|
||||
import { after, before, describe, it, mock } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
import { mkdtempSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
|
||||
const PROJECT_ROOT = mkdtempSync(join(tmpdir(), 'ccw-skills-routes-project-'));
|
||||
const OUTSIDE_ROOT = mkdtempSync(join(tmpdir(), 'ccw-skills-routes-outside-'));
|
||||
|
||||
const skillsRoutesUrl = new URL('../dist/core/routes/skills-routes.js', import.meta.url);
|
||||
skillsRoutesUrl.searchParams.set('t', String(Date.now()));
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
type JsonResponse = { status: number; json: any; text: string };
|
||||
|
||||
async function requestJson(baseUrl: string, method: string, path: string, body?: unknown): Promise<JsonResponse> {
|
||||
const url = new URL(path, baseUrl);
|
||||
const payload = body === undefined ? null : Buffer.from(JSON.stringify(body), 'utf8');
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = http.request(
|
||||
url,
|
||||
{
|
||||
method,
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
...(payload ? { 'Content-Type': 'application/json', 'Content-Length': String(payload.length) } : {}),
|
||||
},
|
||||
},
|
||||
(res) => {
|
||||
let responseBody = '';
|
||||
res.on('data', (chunk) => {
|
||||
responseBody += chunk.toString();
|
||||
});
|
||||
res.on('end', () => {
|
||||
let json: any = null;
|
||||
try {
|
||||
json = responseBody ? JSON.parse(responseBody) : null;
|
||||
} catch {
|
||||
json = null;
|
||||
}
|
||||
resolve({ status: res.statusCode || 0, json, text: responseBody });
|
||||
});
|
||||
},
|
||||
);
|
||||
req.on('error', reject);
|
||||
if (payload) req.write(payload);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
|
||||
function handlePostRequest(req: http.IncomingMessage, res: http.ServerResponse, handler: (body: unknown) => Promise<any>): void {
|
||||
let body = '';
|
||||
req.on('data', (chunk) => {
|
||||
body += chunk.toString();
|
||||
});
|
||||
req.on('end', async () => {
|
||||
try {
|
||||
const parsed = body ? JSON.parse(body) : {};
|
||||
const result = await handler(parsed);
|
||||
|
||||
if (result?.error) {
|
||||
res.writeHead(result.status || 500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: result.error }));
|
||||
} else {
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async function createServer(initialPath: string): Promise<{ server: http.Server; baseUrl: string }> {
|
||||
const server = http.createServer(async (req, res) => {
|
||||
const url = new URL(req.url || '/', 'http://localhost');
|
||||
const pathname = url.pathname;
|
||||
|
||||
const ctx = {
|
||||
pathname,
|
||||
url,
|
||||
req,
|
||||
res,
|
||||
initialPath,
|
||||
handlePostRequest,
|
||||
broadcastToClients() {},
|
||||
};
|
||||
|
||||
try {
|
||||
const handled = await mod.handleSkillsRoutes(ctx);
|
||||
if (!handled) {
|
||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Not Found' }));
|
||||
}
|
||||
} catch (err: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: err?.message || String(err) }));
|
||||
}
|
||||
});
|
||||
|
||||
await new Promise<void>((resolve) => server.listen(0, () => resolve()));
|
||||
const addr = server.address();
|
||||
const port = typeof addr === 'object' && addr ? addr.port : 0;
|
||||
return { server, baseUrl: `http://127.0.0.1:${port}` };
|
||||
}
|
||||
|
||||
describe('skills routes path validation', async () => {
|
||||
before(async () => {
|
||||
mock.method(console, 'log', () => {});
|
||||
mock.method(console, 'error', () => {});
|
||||
mod = await import(skillsRoutesUrl.href);
|
||||
});
|
||||
|
||||
after(() => {
|
||||
mock.restoreAll();
|
||||
rmSync(PROJECT_ROOT, { recursive: true, force: true });
|
||||
rmSync(OUTSIDE_ROOT, { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it('GET /api/skills rejects projectPath outside initialPath', async () => {
|
||||
const { server, baseUrl } = await createServer(PROJECT_ROOT);
|
||||
try {
|
||||
const res = await requestJson(baseUrl, 'GET', `/api/skills?path=${encodeURIComponent(OUTSIDE_ROOT)}`);
|
||||
assert.equal(res.status, 403);
|
||||
assert.equal(res.json.error, 'Access denied');
|
||||
} finally {
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
}
|
||||
});
|
||||
|
||||
it('GET /api/skills/:name/dir rejects traversal via subpath', async () => {
|
||||
const { server, baseUrl } = await createServer(PROJECT_ROOT);
|
||||
try {
|
||||
const subpath = encodeURIComponent('../..');
|
||||
const pathParam = encodeURIComponent(PROJECT_ROOT);
|
||||
const res = await requestJson(baseUrl, 'GET', `/api/skills/demo/dir?subpath=${subpath}&path=${pathParam}&location=project`);
|
||||
assert.equal(res.status, 403);
|
||||
assert.equal(res.json.error, 'Access denied');
|
||||
} finally {
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
}
|
||||
});
|
||||
|
||||
it('GET /api/skills/:name rejects traversal via path segment', async () => {
|
||||
const { server, baseUrl } = await createServer(PROJECT_ROOT);
|
||||
try {
|
||||
const res = await requestJson(baseUrl, 'GET', '/api/skills/../../secret?location=project');
|
||||
assert.equal(res.status, 403);
|
||||
assert.equal(res.json.error, 'Access denied');
|
||||
} finally {
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
}
|
||||
});
|
||||
|
||||
it('GET /api/skills/:name/dir rejects unsafe skill names', async () => {
|
||||
const { server, baseUrl } = await createServer(PROJECT_ROOT);
|
||||
try {
|
||||
const pathParam = encodeURIComponent(PROJECT_ROOT);
|
||||
const res = await requestJson(baseUrl, 'GET', `/api/skills/${encodeURIComponent('bad..name')}/dir?path=${pathParam}&location=project`);
|
||||
assert.equal(res.status, 400);
|
||||
assert.ok(String(res.json.error).includes('Invalid skill name'));
|
||||
} finally {
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
}
|
||||
});
|
||||
});
|
||||
178
ccw/tests/token-manager.test.ts
Normal file
178
ccw/tests/token-manager.test.ts
Normal file
@@ -0,0 +1,178 @@
|
||||
/**
|
||||
* Unit tests for TokenManager authentication helper.
|
||||
*
|
||||
* Notes:
|
||||
* - Targets the runtime implementation shipped in `ccw/dist`.
|
||||
* - Uses in-memory fs stubs (no real file IO).
|
||||
*/
|
||||
|
||||
import { after, beforeEach, describe, it } from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import path from 'node:path';
|
||||
import { createRequire } from 'node:module';
|
||||
|
||||
const require = createRequire(import.meta.url);
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const fs = require('node:fs') as typeof import('node:fs');
|
||||
|
||||
const ORIGINAL_ENV = { ...process.env };
|
||||
const TEST_CCW_HOME = path.join(process.cwd(), '.tmp-ccw-auth-home');
|
||||
process.env.CCW_DATA_DIR = TEST_CCW_HOME;
|
||||
|
||||
type FsState = {
|
||||
existing: Set<string>;
|
||||
files: Map<string, string>;
|
||||
mkdirCalls: Array<{ path: string; options: unknown }>;
|
||||
writeCalls: Array<{ path: string; data: string; options: unknown }>;
|
||||
chmodCalls: Array<{ path: string; mode: number }>;
|
||||
};
|
||||
|
||||
const state: FsState = {
|
||||
existing: new Set(),
|
||||
files: new Map(),
|
||||
mkdirCalls: [],
|
||||
writeCalls: [],
|
||||
chmodCalls: [],
|
||||
};
|
||||
|
||||
function key(filePath: string): string {
|
||||
return path.resolve(filePath).replace(/\\/g, '/').toLowerCase();
|
||||
}
|
||||
|
||||
function setExists(filePath: string): void {
|
||||
state.existing.add(key(filePath));
|
||||
}
|
||||
|
||||
function setFile(filePath: string, content: string): void {
|
||||
const normalized = key(filePath);
|
||||
state.files.set(normalized, content);
|
||||
state.existing.add(normalized);
|
||||
}
|
||||
|
||||
const originalFs = {
|
||||
existsSync: fs.existsSync,
|
||||
mkdirSync: fs.mkdirSync,
|
||||
readFileSync: fs.readFileSync,
|
||||
writeFileSync: fs.writeFileSync,
|
||||
chmodSync: fs.chmodSync,
|
||||
};
|
||||
|
||||
fs.existsSync = ((filePath: string) => state.existing.has(key(filePath))) as any;
|
||||
fs.mkdirSync = ((dirPath: string, options: unknown) => {
|
||||
state.mkdirCalls.push({ path: dirPath, options });
|
||||
setExists(dirPath);
|
||||
}) as any;
|
||||
fs.readFileSync = ((filePath: string, encoding: string) => {
|
||||
assert.equal(encoding, 'utf8');
|
||||
const content = state.files.get(key(filePath));
|
||||
if (content !== undefined) return content;
|
||||
|
||||
// Allow Node/third-party modules (e.g., jsonwebtoken) to load normally.
|
||||
return originalFs.readFileSync(filePath, encoding);
|
||||
}) as any;
|
||||
fs.writeFileSync = ((filePath: string, data: string, options: unknown) => {
|
||||
state.writeCalls.push({ path: filePath, data: String(data), options });
|
||||
setFile(filePath, String(data));
|
||||
}) as any;
|
||||
fs.chmodSync = ((filePath: string, mode: number) => {
|
||||
state.chmodCalls.push({ path: filePath, mode });
|
||||
}) as any;
|
||||
|
||||
const tokenManagerUrl = new URL('../dist/core/auth/token-manager.js', import.meta.url).href;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
let mod: any;
|
||||
|
||||
beforeEach(() => {
|
||||
state.existing.clear();
|
||||
state.files.clear();
|
||||
state.mkdirCalls.length = 0;
|
||||
state.writeCalls.length = 0;
|
||||
state.chmodCalls.length = 0;
|
||||
});
|
||||
|
||||
describe('TokenManager authentication helper', async () => {
|
||||
mod = await import(tokenManagerUrl);
|
||||
|
||||
it('generateToken produces a valid HS256 JWT with 24h expiry', () => {
|
||||
const manager = new mod.TokenManager();
|
||||
const secret = 's'.repeat(64);
|
||||
const now = Date.now();
|
||||
|
||||
const result = manager.generateToken(secret);
|
||||
assert.ok(result.token.includes('.'));
|
||||
assert.ok(result.expiresAt instanceof Date);
|
||||
|
||||
const [headerB64] = result.token.split('.');
|
||||
const header = JSON.parse(Buffer.from(headerB64, 'base64url').toString('utf8')) as { alg?: string };
|
||||
assert.equal(header.alg, 'HS256');
|
||||
|
||||
const msUntilExpiry = result.expiresAt.getTime() - now;
|
||||
assert.ok(msUntilExpiry > 23 * 60 * 60 * 1000);
|
||||
assert.ok(msUntilExpiry < 24 * 60 * 60 * 1000 + 60 * 1000);
|
||||
});
|
||||
|
||||
it('validateToken accepts correct secret and rejects wrong secret', () => {
|
||||
const manager = new mod.TokenManager();
|
||||
const secret = 'my-secret';
|
||||
const { token } = manager.generateToken(secret);
|
||||
|
||||
assert.equal(manager.validateToken(token, secret), true);
|
||||
assert.equal(manager.validateToken(token, 'wrong-secret'), false);
|
||||
});
|
||||
|
||||
it('validateToken rejects expired tokens', () => {
|
||||
const manager = new mod.TokenManager({ tokenTtlMs: -1000 });
|
||||
const secret = 'my-secret';
|
||||
const { token } = manager.generateToken(secret);
|
||||
|
||||
assert.equal(manager.validateToken(token, secret), false);
|
||||
});
|
||||
|
||||
it('persists and reloads secret key with restrictive permissions', () => {
|
||||
const authDir = path.join(TEST_CCW_HOME, 'auth');
|
||||
const secretPath = path.join(authDir, 'secret.key');
|
||||
|
||||
const manager1 = new mod.TokenManager({ authDir, secretKeyPath: secretPath });
|
||||
const secret1 = manager1.getSecretKey();
|
||||
|
||||
assert.equal(secret1.length, 64); // 32 bytes hex
|
||||
assert.equal(state.writeCalls.length, 1);
|
||||
assert.equal(state.writeCalls[0].path, secretPath);
|
||||
assert.deepEqual(state.writeCalls[0].options, { encoding: 'utf8', mode: 0o600 });
|
||||
assert.deepEqual(state.chmodCalls, [{ path: secretPath, mode: 0o600 }]);
|
||||
|
||||
const manager2 = new mod.TokenManager({ authDir, secretKeyPath: secretPath });
|
||||
const secret2 = manager2.getSecretKey();
|
||||
assert.equal(secret2, secret1);
|
||||
});
|
||||
|
||||
it('rotates token before expiry and persists updated token', () => {
|
||||
const authDir = path.join(TEST_CCW_HOME, 'auth');
|
||||
const tokenPath = path.join(authDir, 'token.jwt');
|
||||
|
||||
const manager = new mod.TokenManager({
|
||||
authDir,
|
||||
tokenPath,
|
||||
tokenTtlMs: 1000,
|
||||
rotateBeforeExpiryMs: 2000,
|
||||
});
|
||||
|
||||
const first = manager.getOrCreateAuthToken();
|
||||
const tokenFileFirst = state.files.get(key(tokenPath));
|
||||
assert.equal(tokenFileFirst, first.token);
|
||||
|
||||
const second = manager.getOrCreateAuthToken();
|
||||
const tokenFileSecond = state.files.get(key(tokenPath));
|
||||
assert.equal(tokenFileSecond, second.token);
|
||||
assert.notEqual(second.token, first.token);
|
||||
});
|
||||
});
|
||||
|
||||
after(() => {
|
||||
fs.existsSync = originalFs.existsSync;
|
||||
fs.mkdirSync = originalFs.mkdirSync;
|
||||
fs.readFileSync = originalFs.readFileSync;
|
||||
fs.writeFileSync = originalFs.writeFileSync;
|
||||
fs.chmodSync = originalFs.chmodSync;
|
||||
process.env = ORIGINAL_ENV;
|
||||
});
|
||||
Reference in New Issue
Block a user