mirror of
https://github.com/catlog22/Claude-Code-Workflow.git
synced 2026-02-12 02:37:45 +08:00
* feat(security): Secure dashboard server by default ## Solution Summary - Solution-ID: SOL-DSC-002-1 - Issue-ID: DSC-002 ## Tasks Completed - [T1] JWT token manager (24h expiry, persisted secret/token) - [T2] API auth middleware + localhost token endpoint - [T3] Default bind 127.0.0.1, add --host with warning - [T4] Localhost-only CORS with credentials + Vary - [T5] SECURITY.md documentation + README link ## Verification - npm run build - npm test -- ccw/tests/token-manager.test.ts ccw/tests/middleware.test.ts ccw/tests/server-auth.integration.test.ts ccw/tests/server.test.ts ccw/tests/cors.test.ts * fix(security): Prevent command injection in Windows spawn() ## Solution Summary - **Solution-ID**: SOL-DSC-001-1 - **Issue-ID**: DSC-001 - **Risk/Impact/Complexity**: high/high/medium ## Tasks Completed - [T1] Create Windows shell escape utility - [T2] Escape cli-executor spawn() args on Windows - [T3] Add command injection regression tests ## Files Modified - ccw/src/utils/shell-escape.ts - ccw/src/tools/cli-executor.ts - ccw/tests/shell-escape.test.ts - ccw/tests/security/command-injection.test.ts ## Verification - npm run build - npm test -- ccw/tests/shell-escape.test.ts ccw/tests/security/command-injection.test.ts * fix(security): Harden path validation (DSC-005) ## Solution Summary - Solution-ID: SOL-DSC-005-1 - Issue-ID: DSC-005 ## Tasks Completed - T1: Refactor path validation to pre-resolution checking - T2: Implement allowlist-based path validation - T3: Add path validation to API routes - T4: Add path security regression tests ## Files Modified - ccw/src/utils/path-resolver.ts - ccw/src/utils/path-validator.ts - ccw/src/core/routes/graph-routes.ts - ccw/src/core/routes/files-routes.ts - ccw/src/core/routes/skills-routes.ts - ccw/tests/path-resolver.test.ts - ccw/tests/graph-routes.test.ts - ccw/tests/files-routes.test.ts - ccw/tests/skills-routes.test.ts - ccw/tests/security/path-traversal.test.ts ## Verification - npm run build - npm test -- 
path-resolver.test.ts - npm test -- path-validator.test.ts - npm test -- graph-routes.test.ts - npm test -- files-routes.test.ts - npm test -- skills-routes.test.ts - npm test -- ccw/tests/security/path-traversal.test.ts * fix(security): Prevent credential leakage (DSC-004) ## Solution Summary - Solution-ID: SOL-DSC-004-1 - Issue-ID: DSC-004 ## Tasks Completed - T1: Create credential handling security tests - T2: Add log sanitization tests - T3: Add env var leakage prevention tests - T4: Add secure storage tests ## Files Modified - ccw/src/config/litellm-api-config-manager.ts - ccw/src/core/routes/litellm-api-routes.ts - ccw/tests/security/credential-handling.test.ts ## Verification - npm run build - node --experimental-strip-types --test ccw/tests/security/credential-handling.test.ts * test(ranking): expand normalize_weights edge case coverage (ISS-1766920108814-0) ## Solution Summary - Solution-ID: SOL-20251228113607 - Issue-ID: ISS-1766920108814-0 ## Tasks Completed - T1: Fix NaN and invalid total handling in normalize_weights - T2: Add unit tests for NaN edge cases in normalize_weights ## Files Modified - codex-lens/tests/test_rrf_fusion.py ## Verification - python -m pytest codex-lens/tests/test_rrf_fusion.py::TestNormalizeBM25Score -v - python -m pytest codex-lens/tests/test_rrf_fusion.py -v -k normalize - python -m pytest codex-lens/tests/test_rrf_fusion.py::TestReciprocalRankFusion::test_weight_normalization codex-lens/tests/test_cli_hybrid_search.py::TestCLIHybridSearch::test_weights_normalization -v * feat(security): Add CSRF protection and tighten CORS (DSC-006) ## Solution Summary - Solution-ID: SOL-DSC-006-1 - Issue-ID: DSC-006 - Risk/Impact/Complexity: high/high/medium ## Tasks Completed - T1: Create CSRF token generation system - T2: Add CSRF token endpoints - T3: Implement CSRF validation middleware - T4: Restrict CORS to trusted origins - T5: Add CSRF security tests ## Files Modified - ccw/src/core/auth/csrf-manager.ts - 
ccw/src/core/auth/csrf-middleware.ts - ccw/src/core/routes/auth-routes.ts - ccw/src/core/server.ts - ccw/tests/csrf-manager.test.ts - ccw/tests/auth-routes.test.ts - ccw/tests/csrf-middleware.test.ts - ccw/tests/security/csrf.test.ts ## Verification - npm run build - node --experimental-strip-types --test ccw/tests/csrf-manager.test.ts - node --experimental-strip-types --test ccw/tests/auth-routes.test.ts - node --experimental-strip-types --test ccw/tests/csrf-middleware.test.ts - node --experimental-strip-types --test ccw/tests/cors.test.ts - node --experimental-strip-types --test ccw/tests/security/csrf.test.ts * fix(cli-executor): prevent stale SIGKILL timeouts ## Solution Summary - Solution-ID: SOL-DSC-007-1 - Issue-ID: DSC-007 - Risk/Impact/Complexity: low/low/low ## Tasks Completed - [T1] Store timeout handle in killCurrentCliProcess ## Files Modified - ccw/src/tools/cli-executor.ts - ccw/tests/cli-executor-kill.test.ts ## Verification - node --experimental-strip-types --test ccw/tests/cli-executor-kill.test.ts * fix(cli-executor): enhance merge validation guards ## Solution Summary - Solution-ID: SOL-DSC-008-1 - Issue-ID: DSC-008 - Risk/Impact/Complexity: low/low/low ## Tasks Completed - [T1] Enhance sourceConversations array validation ## Files Modified - ccw/src/tools/cli-executor.ts - ccw/tests/cli-executor-merge-validation.test.ts ## Verification - node --experimental-strip-types --test ccw/tests/cli-executor-merge-validation.test.ts * refactor(core): remove @ts-nocheck from core routes ## Solution Summary - Solution-ID: SOL-DSC-003-1 - Issue-ID: DSC-003 - Queue-ID: QUE-20260106-164500 - Item-ID: S-9 ## Tasks Completed - T1: Create shared RouteContext type definition - T2: Remove @ts-nocheck from small route files - T3: Remove @ts-nocheck from medium route files - T4: Remove @ts-nocheck from large route files - T5: Remove @ts-nocheck from remaining core files ## Files Modified - ccw/src/core/dashboard-generator-patch.ts - 
ccw/src/core/dashboard-generator.ts - ccw/src/core/routes/ccw-routes.ts - ccw/src/core/routes/claude-routes.ts - ccw/src/core/routes/cli-routes.ts - ccw/src/core/routes/codexlens-routes.ts - ccw/src/core/routes/discovery-routes.ts - ccw/src/core/routes/files-routes.ts - ccw/src/core/routes/graph-routes.ts - ccw/src/core/routes/help-routes.ts - ccw/src/core/routes/hooks-routes.ts - ccw/src/core/routes/issue-routes.ts - ccw/src/core/routes/litellm-api-routes.ts - ccw/src/core/routes/litellm-routes.ts - ccw/src/core/routes/mcp-routes.ts - ccw/src/core/routes/mcp-routes.ts.backup - ccw/src/core/routes/mcp-templates-db.ts - ccw/src/core/routes/nav-status-routes.ts - ccw/src/core/routes/rules-routes.ts - ccw/src/core/routes/session-routes.ts - ccw/src/core/routes/skills-routes.ts - ccw/src/core/routes/status-routes.ts - ccw/src/core/routes/system-routes.ts - ccw/src/core/routes/types.ts - ccw/src/core/server.ts - ccw/src/core/websocket.ts ## Verification - npm run build - npm test * refactor: split cli-executor and codexlens routes into modules ## Solution Summary - Solution-ID: SOL-DSC-012-1 - Issue-ID: DSC-012 - Risk/Impact/Complexity: medium/medium/high ## Tasks Completed - [T1] Extract execution orchestration from cli-executor.ts (Refactor ccw/src/tools) - [T2] Extract route handlers from codexlens-routes.ts (Refactor ccw/src/core/routes) - [T3] Extract prompt concatenation logic from cli-executor (Refactor ccw/src/tools) - [T4] Document refactored module architecture (Docs) ## Files Modified - ccw/src/tools/cli-executor.ts - ccw/src/tools/cli-executor-core.ts - ccw/src/tools/cli-executor-utils.ts - ccw/src/tools/cli-executor-state.ts - ccw/src/tools/cli-prompt-builder.ts - ccw/src/tools/README.md - ccw/src/core/routes/codexlens-routes.ts - ccw/src/core/routes/codexlens/config-handlers.ts - ccw/src/core/routes/codexlens/index-handlers.ts - ccw/src/core/routes/codexlens/semantic-handlers.ts - ccw/src/core/routes/codexlens/watcher-handlers.ts - 
ccw/src/core/routes/codexlens/utils.ts - ccw/src/core/routes/codexlens/README.md ## Verification - npm run build - npm test * test(issue): Add comprehensive issue command tests ## Solution Summary - **Solution-ID**: SOL-DSC-009-1 - **Issue-ID**: DSC-009 - **Risk/Impact/Complexity**: low/high/medium ## Tasks Completed - [T1] Create issue command test file structure: Create isolated test harness - [T2] Add JSONL read/write operation tests: Verify JSONL correctness and errors - [T3] Add issue lifecycle tests: Verify status transitions and timestamps - [T4] Add solution binding tests: Verify binding flows and error cases - [T5] Add queue formation tests: Verify queue creation, IDs, and DAG behavior - [T6] Add queue execution tests: Verify next/done/retry and status sync ## Files Modified - ccw/src/commands/issue.ts - ccw/tests/issue-command.test.ts ## Verification - node --experimental-strip-types --test ccw/tests/issue-command.test.ts * test(routes): Add integration tests for route modules ## Solution Summary - Solution-ID: SOL-DSC-010-1 - Issue-ID: DSC-010 - Queue-ID: QUE-20260106-164500 ## Tasks Completed - [T1] Add tests for ccw-routes.ts - [T2] Add tests for files-routes.ts - [T3] Add tests for claude-routes.ts (includes Windows path fix for create) - [T4] Add tests for issue-routes.ts - [T5] Add tests for help-routes.ts (avoid hanging watchers) - [T6] Add tests for nav-status-routes.ts - [T7] Add tests for hooks/graph/rules/skills/litellm-api routes ## Files Modified - ccw/src/core/routes/claude-routes.ts - ccw/src/core/routes/help-routes.ts - ccw/tests/integration/ccw-routes.test.ts - ccw/tests/integration/claude-routes.test.ts - ccw/tests/integration/files-routes.test.ts - ccw/tests/integration/issue-routes.test.ts - ccw/tests/integration/help-routes.test.ts - ccw/tests/integration/nav-status-routes.test.ts - ccw/tests/integration/hooks-routes.test.ts - ccw/tests/integration/graph-routes.test.ts - ccw/tests/integration/rules-routes.test.ts - 
ccw/tests/integration/skills-routes.test.ts - ccw/tests/integration/litellm-api-routes.test.ts ## Verification - node --experimental-strip-types --test ccw/tests/integration/ccw-routes.test.ts - node --experimental-strip-types --test ccw/tests/integration/files-routes.test.ts - node --experimental-strip-types --test ccw/tests/integration/claude-routes.test.ts - node --experimental-strip-types --test ccw/tests/integration/issue-routes.test.ts - node --experimental-strip-types --test ccw/tests/integration/help-routes.test.ts - node --experimental-strip-types --test ccw/tests/integration/nav-status-routes.test.ts - node --experimental-strip-types --test ccw/tests/integration/hooks-routes.test.ts - node --experimental-strip-types --test ccw/tests/integration/graph-routes.test.ts - node --experimental-strip-types --test ccw/tests/integration/rules-routes.test.ts - node --experimental-strip-types --test ccw/tests/integration/skills-routes.test.ts - node --experimental-strip-types --test ccw/tests/integration/litellm-api-routes.test.ts * refactor(core): Switch cache and lite scanning to async fs ## Solution Summary - Solution-ID: SOL-DSC-013-1 - Issue-ID: DSC-013 - Queue-ID: QUE-20260106-164500 ## Tasks Completed - [T1] Convert cache-manager.ts to async file operations - [T2] Convert lite-scanner.ts to async file operations - [T3] Update cache-manager call sites to await async API - [T4] Update lite-scanner call sites to await async API ## Files Modified - ccw/src/core/cache-manager.ts - ccw/src/core/lite-scanner.ts - ccw/src/core/data-aggregator.ts ## Verification - npm run build - npm test * fix(exec): Add timeout protection for execSync ## Solution Summary - Solution-ID: SOL-DSC-014-1 - Issue-ID: DSC-014 - Queue-ID: QUE-20260106-164500 ## Tasks Completed - [T1] Add timeout to execSync calls in python-utils.ts - [T2] Add timeout to execSync calls in detect-changed-modules.ts - [T3] Add timeout to execSync calls in claude-freshness.ts - [T4] Add timeout to execSync calls 
in issue.ts - [T5] Consolidate execSync timeout constants and audit coverage ## Files Modified - ccw/src/utils/exec-constants.ts - ccw/src/utils/python-utils.ts - ccw/src/tools/detect-changed-modules.ts - ccw/src/core/claude-freshness.ts - ccw/src/commands/issue.ts - ccw/src/tools/smart-search.ts - ccw/src/tools/codex-lens.ts - ccw/src/core/routes/codexlens/config-handlers.ts ## Verification - npm run build - npm test - node --experimental-strip-types --test ccw/tests/issue-command.test.ts * feat(cli): Add progress spinner with elapsed time for long-running operations ## Solution Summary - Solution-ID: SOL-DSC-015-1 - Issue-ID: DSC-015 - Queue-Item: S-15 - Risk/Impact/Complexity: low/medium/low ## Tasks Completed - [T1] Add progress spinner to CLI execution: Update ccw/src/commands/cli.ts ## Files Modified - ccw/src/commands/cli.ts - ccw/tests/cli-command.test.ts ## Verification - node --experimental-strip-types --test ccw/tests/cli-command.test.ts - node --experimental-strip-types --test ccw/tests/cli-executor-kill.test.ts - node --experimental-strip-types --test ccw/tests/cli-executor-merge-validation.test.ts * fix(cli): Move full output hint immediately after truncation notice ## Solution Summary - Solution-ID: SOL-DSC-016-1 - Issue-ID: DSC-016 - Queue-Item: S-16 - Risk/Impact/Complexity: low/high/low ## Tasks Completed - [T1] Relocate output hint after truncation: Update ccw/src/commands/cli.ts ## Files Modified - ccw/src/commands/cli.ts - ccw/tests/cli-command.test.ts ## Verification - npm run build - node --experimental-strip-types --test ccw/tests/cli-command.test.ts * feat(cli): Add confirmation prompts for destructive operations ## Solution Summary - Solution-ID: SOL-DSC-017-1 - Issue-ID: DSC-017 - Queue-Item: S-17 - Risk/Impact/Complexity: low/high/low ## Tasks Completed - [T1] Add confirmation to storage clean operations: Update ccw/src/commands/cli.ts - [T2] Add confirmation to issue queue delete: Update ccw/src/commands/issue.ts ## Files Modified - 
ccw/src/commands/cli.ts - ccw/src/commands/issue.ts - ccw/tests/cli-command.test.ts - ccw/tests/issue-command.test.ts ## Verification - npm run build - node --experimental-strip-types --test ccw/tests/cli-command.test.ts - node --experimental-strip-types --test ccw/tests/issue-command.test.ts * feat(cli): Improve multi-line prompt guidance ## Solution Summary - Solution-ID: SOL-DSC-018-1 - Issue-ID: DSC-018 - Queue-Item: S-18 - Risk/Impact/Complexity: low/medium/low ## Tasks Completed - [T1] Update CLI help to emphasize --file option: Update ccw/src/commands/cli.ts - [T2] Add inline hint for multi-line detection: Update ccw/src/commands/cli.ts ## Files Modified - ccw/src/commands/cli.ts - ccw/tests/cli-command.test.ts ## Verification - npm run build - node --experimental-strip-types --test ccw/tests/cli-command.test.ts --------- Co-authored-by: catlog22 <catlog22@github.com>
597 lines
21 KiB
TypeScript
597 lines
21 KiB
TypeScript
/**
 * Discovery Routes Module
 *
 * Storage Structure:
 * .workflow/issues/discoveries/
 * ├── index.json                       # Discovery session index
 * └── {discovery-id}/
 *     ├── discovery-state.json         # State machine
 *     ├── discovery-progress.json      # Real-time progress
 *     ├── perspectives/                # Per-perspective results
 *     │   ├── bug.json
 *     │   └── ...
 *     ├── external-research.json       # Exa research results
 *     ├── discovery-issues.jsonl       # Generated candidate issues
 *     └── reports/
 *
 * API Endpoints:
 * - GET    /api/discoveries                    - List all discovery sessions
 * - GET    /api/discoveries/:id                - Get discovery session detail
 * - GET    /api/discoveries/:id/findings       - Get all findings
 * - GET    /api/discoveries/:id/progress       - Get real-time progress
 * - POST   /api/discoveries/:id/export         - Export findings as issues
 * - PATCH  /api/discoveries/:id/findings/:fid  - Update finding status
 * - DELETE /api/discoveries/:id                - Delete discovery session
 */
|
|
import { readFileSync, existsSync, writeFileSync, mkdirSync, readdirSync, rmSync } from 'fs';
|
|
import { join } from 'path';
|
|
import type { RouteContext } from './types.js';
|
|
|
|
// ========== Helper Functions ==========
|
|
|
|
function getDiscoveriesDir(projectPath: string): string {
|
|
return join(projectPath, '.workflow', 'issues', 'discoveries');
|
|
}
|
|
|
|
function readDiscoveryIndex(discoveriesDir: string): { discoveries: any[]; total: number } {
|
|
const indexPath = join(discoveriesDir, 'index.json');
|
|
|
|
// Try to read index.json first
|
|
if (existsSync(indexPath)) {
|
|
try {
|
|
return JSON.parse(readFileSync(indexPath, 'utf8'));
|
|
} catch {
|
|
// Fall through to scan
|
|
}
|
|
}
|
|
|
|
// Fallback: scan directory for discovery folders
|
|
if (!existsSync(discoveriesDir)) {
|
|
return { discoveries: [], total: 0 };
|
|
}
|
|
|
|
try {
|
|
const entries = readdirSync(discoveriesDir, { withFileTypes: true });
|
|
const discoveries: any[] = [];
|
|
|
|
for (const entry of entries) {
|
|
if (entry.isDirectory() && entry.name.startsWith('DSC-')) {
|
|
const statePath = join(discoveriesDir, entry.name, 'discovery-state.json');
|
|
if (existsSync(statePath)) {
|
|
try {
|
|
const state = JSON.parse(readFileSync(statePath, 'utf8'));
|
|
discoveries.push({
|
|
discovery_id: entry.name,
|
|
target_pattern: state.target_pattern,
|
|
perspectives: state.metadata?.perspectives || [],
|
|
created_at: state.metadata?.created_at,
|
|
completed_at: state.completed_at
|
|
});
|
|
} catch {
|
|
// Skip invalid entries
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// Sort by creation time descending
|
|
discoveries.sort((a, b) => {
|
|
const timeA = new Date(a.created_at || 0).getTime();
|
|
const timeB = new Date(b.created_at || 0).getTime();
|
|
return timeB - timeA;
|
|
});
|
|
|
|
return { discoveries, total: discoveries.length };
|
|
} catch {
|
|
return { discoveries: [], total: 0 };
|
|
}
|
|
}
|
|
|
|
function writeDiscoveryIndex(discoveriesDir: string, index: any) {
|
|
if (!existsSync(discoveriesDir)) {
|
|
mkdirSync(discoveriesDir, { recursive: true });
|
|
}
|
|
writeFileSync(join(discoveriesDir, 'index.json'), JSON.stringify(index, null, 2));
|
|
}
|
|
|
|
function readDiscoveryState(discoveriesDir: string, discoveryId: string): any | null {
|
|
const statePath = join(discoveriesDir, discoveryId, 'discovery-state.json');
|
|
if (!existsSync(statePath)) return null;
|
|
try {
|
|
return JSON.parse(readFileSync(statePath, 'utf8'));
|
|
} catch {
|
|
return null;
|
|
}
|
|
}
|
|
|
|
function readDiscoveryProgress(discoveriesDir: string, discoveryId: string): any | null {
|
|
// Try merged state first (new schema)
|
|
const statePath = join(discoveriesDir, discoveryId, 'discovery-state.json');
|
|
if (existsSync(statePath)) {
|
|
try {
|
|
const state = JSON.parse(readFileSync(statePath, 'utf8'));
|
|
// New merged schema: perspectives array + results object
|
|
if (state.perspectives && Array.isArray(state.perspectives)) {
|
|
const completed = state.perspectives.filter((p: any) => p.status === 'completed').length;
|
|
const total = state.perspectives.length;
|
|
return {
|
|
discovery_id: discoveryId,
|
|
phase: state.phase,
|
|
last_update: state.updated_at || state.created_at,
|
|
progress: {
|
|
perspective_analysis: {
|
|
total,
|
|
completed,
|
|
in_progress: state.perspectives.filter((p: any) => p.status === 'in_progress').length,
|
|
percent_complete: total > 0 ? Math.round((completed / total) * 100) : 0
|
|
},
|
|
external_research: state.external_research || { enabled: false, completed: false },
|
|
aggregation: { completed: state.phase === 'aggregation' || state.phase === 'complete' },
|
|
issue_generation: { completed: state.phase === 'complete', issues_count: state.results?.issues_generated || 0 }
|
|
},
|
|
agent_status: state.perspectives
|
|
};
|
|
}
|
|
// Old schema: metadata.perspectives (backward compat)
|
|
if (state.metadata?.perspectives) {
|
|
return {
|
|
discovery_id: discoveryId,
|
|
phase: state.phase,
|
|
progress: { perspective_analysis: { total: state.metadata.perspectives.length, completed: state.perspectives_completed?.length || 0 } }
|
|
};
|
|
}
|
|
} catch {
|
|
// Fall through
|
|
}
|
|
}
|
|
// Fallback: try legacy progress file
|
|
const progressPath = join(discoveriesDir, discoveryId, 'discovery-progress.json');
|
|
if (existsSync(progressPath)) {
|
|
try { return JSON.parse(readFileSync(progressPath, 'utf8')); } catch { return null; }
|
|
}
|
|
return null;
|
|
}
|
|
|
|
function readPerspectiveFindings(discoveriesDir: string, discoveryId: string): any[] {
|
|
const perspectivesDir = join(discoveriesDir, discoveryId, 'perspectives');
|
|
if (!existsSync(perspectivesDir)) return [];
|
|
|
|
const allFindings: any[] = [];
|
|
const files = readdirSync(perspectivesDir).filter(f => f.endsWith('.json'));
|
|
|
|
for (const file of files) {
|
|
try {
|
|
const content = JSON.parse(readFileSync(join(perspectivesDir, file), 'utf8'));
|
|
const perspective = file.replace('.json', '');
|
|
|
|
if (content.findings && Array.isArray(content.findings)) {
|
|
allFindings.push({
|
|
perspective,
|
|
summary: content.summary || {},
|
|
findings: content.findings.map((f: any) => ({
|
|
...f,
|
|
perspective: f.perspective || perspective
|
|
}))
|
|
});
|
|
}
|
|
} catch {
|
|
// Skip invalid files
|
|
}
|
|
}
|
|
|
|
return allFindings;
|
|
}
|
|
|
|
function readDiscoveryIssues(discoveriesDir: string, discoveryId: string): any[] {
|
|
const issuesPath = join(discoveriesDir, discoveryId, 'discovery-issues.jsonl');
|
|
if (!existsSync(issuesPath)) return [];
|
|
try {
|
|
const content = readFileSync(issuesPath, 'utf8');
|
|
return content.split('\n').filter(line => line.trim()).map(line => JSON.parse(line));
|
|
} catch {
|
|
return [];
|
|
}
|
|
}
|
|
|
|
function writeDiscoveryIssues(discoveriesDir: string, discoveryId: string, issues: any[]) {
|
|
const issuesPath = join(discoveriesDir, discoveryId, 'discovery-issues.jsonl');
|
|
writeFileSync(issuesPath, issues.map(i => JSON.stringify(i)).join('\n'));
|
|
}
|
|
|
|
function flattenFindings(perspectiveResults: any[]): any[] {
|
|
const allFindings: any[] = [];
|
|
for (const result of perspectiveResults) {
|
|
if (result.findings) {
|
|
allFindings.push(...result.findings);
|
|
}
|
|
}
|
|
return allFindings;
|
|
}
|
|
|
|
function appendToIssuesJsonl(projectPath: string, issues: any[]): { added: number; skipped: number; skippedIds: string[] } {
|
|
const issuesDir = join(projectPath, '.workflow', 'issues');
|
|
const issuesPath = join(issuesDir, 'issues.jsonl');
|
|
|
|
if (!existsSync(issuesDir)) {
|
|
mkdirSync(issuesDir, { recursive: true });
|
|
}
|
|
|
|
// Read existing issues
|
|
let existingIssues: any[] = [];
|
|
if (existsSync(issuesPath)) {
|
|
try {
|
|
const content = readFileSync(issuesPath, 'utf8');
|
|
existingIssues = content.split('\n').filter(line => line.trim()).map(line => JSON.parse(line));
|
|
} catch {
|
|
// Start fresh
|
|
}
|
|
}
|
|
|
|
// Build set of existing IDs and source_finding combinations for deduplication
|
|
const existingIds = new Set(existingIssues.map(i => i.id));
|
|
const existingSourceFindings = new Set(
|
|
existingIssues
|
|
.filter(i => i.source === 'discovery' && i.source_finding_id)
|
|
.map(i => `${i.source_discovery_id}:${i.source_finding_id}`)
|
|
);
|
|
|
|
// Convert and filter duplicates
|
|
const skippedIds: string[] = [];
|
|
const newIssues: any[] = [];
|
|
|
|
for (const di of issues) {
|
|
// Check for duplicate by ID
|
|
if (existingIds.has(di.id)) {
|
|
skippedIds.push(di.id);
|
|
continue;
|
|
}
|
|
|
|
// Check for duplicate by source_discovery_id + source_finding_id
|
|
const sourceKey = `${di.source_discovery_id}:${di.source_finding_id}`;
|
|
if (di.source_finding_id && existingSourceFindings.has(sourceKey)) {
|
|
skippedIds.push(di.id);
|
|
continue;
|
|
}
|
|
|
|
newIssues.push({
|
|
id: di.id,
|
|
title: di.title,
|
|
status: 'registered',
|
|
priority: di.priority || 3,
|
|
context: di.context || di.description || '',
|
|
source: 'discovery',
|
|
source_discovery_id: di.source_discovery_id,
|
|
source_finding_id: di.source_finding_id,
|
|
perspective: di.perspective,
|
|
file: di.file,
|
|
line: di.line,
|
|
labels: di.labels || [],
|
|
created_at: new Date().toISOString(),
|
|
updated_at: new Date().toISOString()
|
|
});
|
|
}
|
|
|
|
if (newIssues.length > 0) {
|
|
const allIssues = [...existingIssues, ...newIssues];
|
|
writeFileSync(issuesPath, allIssues.map(i => JSON.stringify(i)).join('\n'));
|
|
}
|
|
|
|
return { added: newIssues.length, skipped: skippedIds.length, skippedIds };
|
|
}
|
|
|
|
// ========== Route Handler ==========
|
|
|
|
/**
 * Route dispatcher for all /api/discoveries endpoints.
 *
 * Matches the request against each endpoint in order and serves it from the
 * project's file-based discovery store. Returns true when the request was
 * handled (response written or delegated to handlePostRequest), false so the
 * caller can try other route modules otherwise.
 *
 * The target project is taken from the `?path=` query parameter, falling
 * back to the server's initial path.
 *
 * NOTE(review): POST/PATCH branches return plain objects (including
 * `{ error: ... }`) from the handlePostRequest callback — presumably that
 * helper serializes them and chooses the status code; confirm error objects
 * are not sent as 200s.
 */
export async function handleDiscoveryRoutes(ctx: RouteContext): Promise<boolean> {
  const { pathname, url, req, res, initialPath, handlePostRequest } = ctx;
  const projectPath = url.searchParams.get('path') || initialPath;
  const discoveriesDir = getDiscoveriesDir(projectPath);

  // GET /api/discoveries - List all discovery sessions
  if (pathname === '/api/discoveries' && req.method === 'GET') {
    const index = readDiscoveryIndex(discoveriesDir);

    // Enrich each index entry with live state/progress read from disk.
    // readDiscoveryState/readDiscoveryProgress return null on any failure,
    // so missing sessions degrade to phase 'unknown' rather than erroring.
    const enrichedDiscoveries = index.discoveries.map((d: any) => {
      const state = readDiscoveryState(discoveriesDir, d.discovery_id);
      const progress = readDiscoveryProgress(discoveriesDir, d.discovery_id);
      return {
        ...d,
        phase: state?.phase || 'unknown',
        total_findings: state?.total_findings || 0,
        issues_generated: state?.issues_generated || 0,
        priority_distribution: state?.priority_distribution || {},
        progress: progress?.progress || null
      };
    });

    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({
      discoveries: enrichedDiscoveries,
      total: enrichedDiscoveries.length,
      _metadata: { updated_at: new Date().toISOString() }
    }));
    return true;
  }

  // GET /api/discoveries/:id - Get discovery detail
  const detailMatch = pathname.match(/^\/api\/discoveries\/([^/]+)$/);
  if (detailMatch && req.method === 'GET') {
    const discoveryId = detailMatch[1];
    const state = readDiscoveryState(discoveriesDir, discoveryId);

    // A session without a readable state file is treated as nonexistent.
    if (!state) {
      res.writeHead(404, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ error: `Discovery ${discoveryId} not found` }));
      return true;
    }

    const progress = readDiscoveryProgress(discoveriesDir, discoveryId);
    const perspectiveResults = readPerspectiveFindings(discoveriesDir, discoveryId);
    const discoveryIssues = readDiscoveryIssues(discoveriesDir, discoveryId);

    // Read external research if exists (optional artifact; parse failures
    // leave it null rather than failing the request).
    let externalResearch = null;
    const externalPath = join(discoveriesDir, discoveryId, 'external-research.json');
    if (existsSync(externalPath)) {
      try {
        externalResearch = JSON.parse(readFileSync(externalPath, 'utf8'));
      } catch {
        // Ignore
      }
    }

    // Response is the raw state object augmented with derived artifacts.
    // Note: `perspectives` here overwrites any same-named key in state.
    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({
      ...state,
      progress: progress?.progress || null,
      perspectives: perspectiveResults,
      external_research: externalResearch,
      discovery_issues: discoveryIssues
    }));
    return true;
  }

  // GET /api/discoveries/:id/findings - Get all findings
  const findingsMatch = pathname.match(/^\/api\/discoveries\/([^/]+)\/findings$/);
  if (findingsMatch && req.method === 'GET') {
    const discoveryId = findingsMatch[1];

    if (!existsSync(join(discoveriesDir, discoveryId))) {
      res.writeHead(404, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ error: `Discovery ${discoveryId} not found` }));
      return true;
    }

    const perspectiveResults = readPerspectiveFindings(discoveriesDir, discoveryId);
    const allFindings = flattenFindings(perspectiveResults);

    // Support filtering via ?perspective= and ?priority= query params.
    const perspectiveFilter = url.searchParams.get('perspective');
    const priorityFilter = url.searchParams.get('priority');

    let filtered = allFindings;
    if (perspectiveFilter) {
      filtered = filtered.filter(f => f.perspective === perspectiveFilter);
    }
    if (priorityFilter) {
      // NOTE(review): query params are always strings; if finding.priority
      // is stored as a number this strict comparison never matches — confirm
      // the stored type or coerce before comparing.
      filtered = filtered.filter(f => f.priority === priorityFilter);
    }

    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({
      findings: filtered,
      total: filtered.length,
      // Distinct perspectives across the UNFILTERED set, for UI facets.
      perspectives: [...new Set(allFindings.map(f => f.perspective))],
      _metadata: { discovery_id: discoveryId }
    }));
    return true;
  }

  // GET /api/discoveries/:id/progress - Get real-time progress
  const progressMatch = pathname.match(/^\/api\/discoveries\/([^/]+)\/progress$/);
  if (progressMatch && req.method === 'GET') {
    const discoveryId = progressMatch[1];
    const progress = readDiscoveryProgress(discoveriesDir, discoveryId);

    if (!progress) {
      res.writeHead(404, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ error: `Progress for ${discoveryId} not found` }));
      return true;
    }

    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify(progress));
    return true;
  }

  // POST /api/discoveries/:id/export - Export findings as issues
  const exportMatch = pathname.match(/^\/api\/discoveries\/([^/]+)\/export$/);
  if (exportMatch && req.method === 'POST') {
    handlePostRequest(req, res, async (body: any) => {
      const discoveryId = exportMatch[1];
      // Either an explicit list of finding ids or export_all must be given.
      const { finding_ids, export_all } = body as { finding_ids?: string[]; export_all?: boolean };

      if (!existsSync(join(discoveriesDir, discoveryId))) {
        return { error: `Discovery ${discoveryId} not found` };
      }

      const perspectiveResults = readPerspectiveFindings(discoveriesDir, discoveryId);
      const allFindings = flattenFindings(perspectiveResults);

      let toExport: any[];
      if (export_all) {
        toExport = allFindings;
      } else if (finding_ids && finding_ids.length > 0) {
        toExport = allFindings.filter(f => finding_ids.includes(f.id));
      } else {
        return { error: 'Either finding_ids or export_all required' };
      }

      if (toExport.length === 0) {
        return { error: 'No findings to export' };
      }

      // Convert findings to issue format. Ids use Date.now() plus the array
      // index, so all issues in one export share the same timestamp prefix.
      const issuesToExport = toExport.map((f, idx) => {
        const suggestedIssue = f.suggested_issue || {};
        return {
          id: `ISS-${Date.now()}-${idx}`,
          title: suggestedIssue.title || f.title,
          priority: suggestedIssue.priority || 3,
          context: f.description || '',
          source: 'discovery',
          source_discovery_id: discoveryId,
          source_finding_id: f.id, // Track original finding ID for deduplication
          perspective: f.perspective,
          file: f.file,
          line: f.line,
          labels: suggestedIssue.labels || [f.perspective]
        };
      });

      // Append to main issues.jsonl (with deduplication)
      const result = appendToIssuesJsonl(projectPath, issuesToExport);

      // Mark exported findings in perspective files
      if (result.added > 0) {
        // Finding ids whose issues were actually written (not skipped as
        // duplicates by appendToIssuesJsonl).
        const exportedFindingIds = new Set(
          issuesToExport
            .filter((_, idx) => !result.skippedIds.includes(issuesToExport[idx].id))
            .map(i => i.source_finding_id)
        );

        // Update each perspective file to mark findings as exported
        const perspectivesDir = join(discoveriesDir, discoveryId, 'perspectives');
        if (existsSync(perspectivesDir)) {
          const files = readdirSync(perspectivesDir).filter(f => f.endsWith('.json'));
          for (const file of files) {
            const filePath = join(perspectivesDir, file);
            try {
              const content = JSON.parse(readFileSync(filePath, 'utf8'));
              if (content.findings) {
                let modified = false;
                for (const finding of content.findings) {
                  // Only stamp findings not already marked exported, so the
                  // first export timestamp is preserved on re-export.
                  if (exportedFindingIds.has(finding.id) && !finding.exported) {
                    finding.exported = true;
                    finding.exported_at = new Date().toISOString();
                    modified = true;
                  }
                }
                if (modified) {
                  writeFileSync(filePath, JSON.stringify(content, null, 2));
                }
              }
            } catch {
              // Skip invalid files
            }
          }
        }
      }

      // Update discovery state with the running issues_generated counter.
      const state = readDiscoveryState(discoveriesDir, discoveryId);
      if (state) {
        state.issues_generated = (state.issues_generated || 0) + result.added;
        writeFileSync(
          join(discoveriesDir, discoveryId, 'discovery-state.json'),
          JSON.stringify(state, null, 2)
        );
      }

      return {
        success: true,
        exported_count: result.added,
        skipped_count: result.skipped,
        skipped_ids: result.skippedIds,
        message: result.skipped > 0
          ? `Exported ${result.added} issues, skipped ${result.skipped} duplicates`
          : `Exported ${result.added} issues`
      };
    });
    return true;
  }

  // PATCH /api/discoveries/:id/findings/:fid - Update finding status
  const updateFindingMatch = pathname.match(/^\/api\/discoveries\/([^/]+)\/findings\/([^/]+)$/);
  if (updateFindingMatch && req.method === 'PATCH') {
    handlePostRequest(req, res, async (body: any) => {
      const [, discoveryId, findingId] = updateFindingMatch;
      // Both fields are optional; only supplied ones are written.
      const { status, dismissed } = body as { status?: string; dismissed?: boolean };

      const perspectivesDir = join(discoveriesDir, discoveryId, 'perspectives');
      if (!existsSync(perspectivesDir)) {
        return { error: `Discovery ${discoveryId} not found` };
      }

      // Find and update the finding. Searches perspective files in directory
      // order and stops at the first match (finding ids are assumed unique
      // across perspectives — not enforced here).
      const files = readdirSync(perspectivesDir).filter(f => f.endsWith('.json'));
      let updated = false;

      for (const file of files) {
        const filePath = join(perspectivesDir, file);
        try {
          const content = JSON.parse(readFileSync(filePath, 'utf8'));
          if (content.findings) {
            const findingIndex = content.findings.findIndex((f: any) => f.id === findingId);
            if (findingIndex !== -1) {
              if (status !== undefined) {
                content.findings[findingIndex].status = status;
              }
              if (dismissed !== undefined) {
                content.findings[findingIndex].dismissed = dismissed;
              }
              content.findings[findingIndex].updated_at = new Date().toISOString();
              writeFileSync(filePath, JSON.stringify(content, null, 2));
              updated = true;
              break;
            }
          }
        } catch {
          // Skip invalid files
        }
      }

      if (!updated) {
        return { error: `Finding ${findingId} not found` };
      }

      return { success: true, finding_id: findingId };
    });
    return true;
  }

  // DELETE /api/discoveries/:id - Delete discovery session
  const deleteMatch = pathname.match(/^\/api\/discoveries\/([^/]+)$/);
  if (deleteMatch && req.method === 'DELETE') {
    const discoveryId = deleteMatch[1];
    const discoveryPath = join(discoveriesDir, discoveryId);

    if (!existsSync(discoveryPath)) {
      res.writeHead(404, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ error: `Discovery ${discoveryId} not found` }));
      return true;
    }

    try {
      // Remove directory, then drop the session from the persisted index.
      // If the index rewrite fails the directory is already gone, so the
      // 500 below may leave a stale index entry (self-heals on next scan).
      rmSync(discoveryPath, { recursive: true, force: true });

      // Update index
      const index = readDiscoveryIndex(discoveriesDir);
      index.discoveries = index.discoveries.filter((d: any) => d.discovery_id !== discoveryId);
      index.total = index.discoveries.length;
      writeDiscoveryIndex(discoveriesDir, index);

      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ success: true, deleted: discoveryId }));
    } catch (err) {
      res.writeHead(500, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ error: 'Failed to delete discovery' }));
    }
    return true;
  }

  // Not handled
  return false;
}
|