Mirror of https://github.com/catlog22/Claude-Code-Workflow.git (synced 2026-02-10 02:24:35 +08:00)
Add comprehensive tests for schema cleanup migration and search comparison
- Implement tests for migration 005 to verify removal of deprecated fields in the database schema.
- Ensure that new databases are created with a clean schema.
- Validate that keywords are correctly extracted from the normalized file_keywords table.
- Test symbol insertion without deprecated fields and subdir operations without direct_files.
- Create a detailed search comparison test to evaluate vector search vs hybrid search performance.
- Add a script for reindexing projects to extract code relationships and verify GraphAnalyzer functionality.
- Include a test script to check TreeSitter parser availability and relationship extraction from sample files.
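As a sketch of the first item above: a minimal example of the kind of assertion such a migration test can make, assuming better-sqlite3. The in-memory database and helper below are hypothetical, not the repository's actual test code; it checks that the deprecated `token_count` and `symbol_type` columns (dropped from `symbols` in this commit) are absent from the post-migration schema.

// Hypothetical sketch (not the repository's test file): verify that the
// schema cleanup migration leaves no deprecated columns behind.
import Database from 'better-sqlite3';
import assert from 'node:assert';

function columnNames(db: Database.Database, table: string): string[] {
  // PRAGMA table_info returns one row per column of the table.
  return db.prepare(`PRAGMA table_info(${table})`).all().map((col: any) => col.name);
}

// An in-memory database stands in for a freshly created index with a clean schema.
const db = new Database(':memory:');
db.exec(`
  CREATE TABLE symbols (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    file_id INTEGER NOT NULL,
    name TEXT NOT NULL,
    kind TEXT NOT NULL,
    start_line INTEGER,
    end_line INTEGER
  );
`);

// Deprecated fields removed by this commit must not appear after migration.
const cols = columnNames(db, 'symbols');
for (const deprecated of ['token_count', 'symbol_type']) {
  assert.ok(!cols.includes(deprecated), `expected ${deprecated} to be removed from symbols`);
}
db.close();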
@@ -24,7 +24,13 @@ const MODULE_CSS_FILES = [
   '07-managers.css',
   '08-review.css',
   '09-explorer.css',
-  '10-cli.css'
+  '10-cli.css',
+  '11-memory.css',
+  '11-prompt-history.css',
+  '12-skills-rules.css',
+  '13-claude-manager.css',
+  '14-graph-explorer.css',
+  '15-mcp-manager.css'
 ];

 const MODULE_FILES = [
@@ -57,6 +63,7 @@ const MODULE_FILES = [
   'views/lite-tasks.js',
   'views/fix-session.js',
   'views/cli-manager.js',
+  'views/codexlens-manager.js',
   'views/explorer.js',
   'views/mcp-manager.js',
   'views/hook-manager.js',

@@ -104,45 +104,45 @@ export class HistoryImporter {

   /**
    * Initialize database schema for conversation history
+   * NOTE: Schema aligned with MemoryStore for seamless importing
    */
   private initSchema(): void {
     this.db.exec(`
-      -- Conversations table
+      -- Conversations table (aligned with MemoryStore schema)
       CREATE TABLE IF NOT EXISTS conversations (
         id TEXT PRIMARY KEY,
-        session_id TEXT NOT NULL,
-        project_path TEXT,
+        source TEXT DEFAULT 'ccw',
+        external_id TEXT,
+        project_name TEXT,
+        git_branch TEXT,
         created_at TEXT NOT NULL,
         updated_at TEXT NOT NULL,
-        message_count INTEGER DEFAULT 0,
-        total_tokens INTEGER DEFAULT 0,
-        metadata TEXT
+        quality_score INTEGER,
+        turn_count INTEGER DEFAULT 0,
+        prompt_preview TEXT
       );

-      -- Messages table
+      -- Messages table (aligned with MemoryStore schema)
       CREATE TABLE IF NOT EXISTS messages (
-        id TEXT PRIMARY KEY,
+        id INTEGER PRIMARY KEY AUTOINCREMENT,
         conversation_id TEXT NOT NULL,
-        parent_id TEXT,
-        role TEXT NOT NULL,
-        content TEXT NOT NULL,
+        role TEXT NOT NULL CHECK(role IN ('user', 'assistant', 'system')),
+        content_text TEXT,
+        content_json TEXT,
         timestamp TEXT NOT NULL,
-        model TEXT,
-        input_tokens INTEGER DEFAULT 0,
-        output_tokens INTEGER DEFAULT 0,
-        cwd TEXT,
-        git_branch TEXT,
+        token_count INTEGER,
         FOREIGN KEY (conversation_id) REFERENCES conversations(id) ON DELETE CASCADE
       );

-      -- Tool calls table
+      -- Tool calls table (aligned with MemoryStore schema)
       CREATE TABLE IF NOT EXISTS tool_calls (
-        id TEXT PRIMARY KEY,
-        message_id TEXT NOT NULL,
+        id INTEGER PRIMARY KEY AUTOINCREMENT,
+        message_id INTEGER NOT NULL,
         tool_name TEXT NOT NULL,
-        tool_input TEXT,
-        tool_result TEXT,
-        timestamp TEXT NOT NULL,
+        tool_args TEXT,
+        tool_output TEXT,
+        status TEXT,
+        duration_ms INTEGER,
         FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
       );

@@ -160,13 +160,11 @@ export class HistoryImporter {
         created_at TEXT NOT NULL
       );

-      -- Indexes
-      CREATE INDEX IF NOT EXISTS idx_conversations_session ON conversations(session_id);
-      CREATE INDEX IF NOT EXISTS idx_conversations_project ON conversations(project_path);
+      -- Indexes (aligned with MemoryStore)
+      CREATE INDEX IF NOT EXISTS idx_conversations_created ON conversations(created_at DESC);
+      CREATE INDEX IF NOT EXISTS idx_conversations_updated ON conversations(updated_at DESC);
       CREATE INDEX IF NOT EXISTS idx_messages_conversation ON messages(conversation_id);
       CREATE INDEX IF NOT EXISTS idx_messages_timestamp ON messages(timestamp DESC);
       CREATE INDEX IF NOT EXISTS idx_tool_calls_message ON tool_calls(message_id);
       CREATE INDEX IF NOT EXISTS idx_tool_calls_name ON tool_calls(tool_name);
     `);
   }

@@ -332,17 +330,17 @@ export class HistoryImporter {
     const result: ImportResult = { imported: 0, skipped: 0, errors: 0 };

     const upsertConversation = this.db.prepare(`
-      INSERT INTO conversations (id, session_id, project_path, created_at, updated_at, message_count, metadata)
-      VALUES (@id, @session_id, @project_path, @created_at, @updated_at, 1, @metadata)
+      INSERT INTO conversations (id, source, external_id, project_name, created_at, updated_at, turn_count, prompt_preview)
+      VALUES (@id, @source, @external_id, @project_name, @created_at, @updated_at, 1, @prompt_preview)
       ON CONFLICT(id) DO UPDATE SET
         updated_at = @updated_at,
-        message_count = message_count + 1
+        turn_count = turn_count + 1,
+        prompt_preview = @prompt_preview
     `);

     const upsertMessage = this.db.prepare(`
-      INSERT INTO messages (id, conversation_id, role, content, timestamp, cwd)
-      VALUES (@id, @conversation_id, 'user', @content, @timestamp, @cwd)
-      ON CONFLICT(id) DO NOTHING
+      INSERT INTO messages (conversation_id, role, content_text, timestamp)
+      VALUES (@conversation_id, 'user', @content_text, @timestamp)
     `);

     const insertHash = this.db.prepare(`
@@ -354,7 +352,6 @@ export class HistoryImporter {
     for (const entry of entries) {
       try {
         const timestamp = new Date(entry.timestamp).toISOString();
-        const messageId = `${entry.sessionId}-${entry.timestamp}`;
         const hash = this.generateHash(entry.sessionId, timestamp, entry.display);

         // Check if hash exists
@@ -364,29 +361,28 @@ export class HistoryImporter {
           continue;
         }

-        // Insert conversation
+        // Insert conversation (using MemoryStore-compatible fields)
         upsertConversation.run({
           id: entry.sessionId,
-          session_id: entry.sessionId,
-          project_path: entry.project,
+          source: 'global_history',
+          external_id: entry.sessionId,
+          project_name: entry.project,
           created_at: timestamp,
           updated_at: timestamp,
-          metadata: JSON.stringify({ source: 'global_history' })
+          prompt_preview: entry.display.substring(0, 100)
         });

-        // Insert message
-        upsertMessage.run({
-          id: messageId,
+        // Insert message (using MemoryStore-compatible fields)
+        const insertResult = upsertMessage.run({
           conversation_id: entry.sessionId,
-          content: entry.display,
-          timestamp,
-          cwd: entry.project
+          content_text: entry.display,
+          timestamp
         });

-        // Insert hash
+        // Insert hash (using actual message ID from insert)
         insertHash.run({
           hash,
-          message_id: messageId,
+          message_id: String(insertResult.lastInsertRowid),
           created_at: timestamp
         });

@@ -413,24 +409,22 @@ export class HistoryImporter {
     const result: ImportResult = { imported: 0, skipped: 0, errors: 0 };

     const upsertConversation = this.db.prepare(`
-      INSERT INTO conversations (id, session_id, project_path, created_at, updated_at, message_count, total_tokens, metadata)
-      VALUES (@id, @session_id, @project_path, @created_at, @updated_at, @message_count, @total_tokens, @metadata)
+      INSERT INTO conversations (id, source, external_id, project_name, git_branch, created_at, updated_at, turn_count, prompt_preview)
+      VALUES (@id, @source, @external_id, @project_name, @git_branch, @created_at, @updated_at, @turn_count, @prompt_preview)
       ON CONFLICT(id) DO UPDATE SET
         updated_at = @updated_at,
-        message_count = @message_count,
-        total_tokens = @total_tokens
+        turn_count = @turn_count,
+        prompt_preview = @prompt_preview
     `);

     const upsertMessage = this.db.prepare(`
-      INSERT INTO messages (id, conversation_id, parent_id, role, content, timestamp, model, input_tokens, output_tokens, cwd, git_branch)
-      VALUES (@id, @conversation_id, @parent_id, @role, @content, @timestamp, @model, @input_tokens, @output_tokens, @cwd, @git_branch)
-      ON CONFLICT(id) DO NOTHING
+      INSERT INTO messages (conversation_id, role, content_text, content_json, timestamp, token_count)
+      VALUES (@conversation_id, @role, @content_text, @content_json, @timestamp, @token_count)
     `);

     const insertToolCall = this.db.prepare(`
-      INSERT INTO tool_calls (id, message_id, tool_name, tool_input, tool_result, timestamp)
-      VALUES (@id, @message_id, @tool_name, @tool_input, @tool_result, @timestamp)
-      ON CONFLICT(id) DO NOTHING
+      INSERT INTO tool_calls (message_id, tool_name, tool_args, tool_output, status)
+      VALUES (@message_id, @tool_name, @tool_args, @tool_output, @status)
     `);

     const insertHash = this.db.prepare(`
@@ -439,27 +433,29 @@ export class HistoryImporter {
     `);

     const transaction = this.db.transaction(() => {
-      let totalTokens = 0;
       const firstMessage = messages[0];
       const lastMessage = messages[messages.length - 1];
+      const promptPreview = firstMessage?.message
+        ? this.extractTextContent(firstMessage.message.content).substring(0, 100)
+        : '';

+      // Insert conversation FIRST (before messages, for foreign key constraint)
       upsertConversation.run({
         id: sessionId,
-        session_id: sessionId,
-        project_path: metadata.cwd || null,
+        source: 'session_file',
+        external_id: sessionId,
+        project_name: metadata.cwd || null,
+        git_branch: metadata.gitBranch || null,
         created_at: firstMessage.timestamp,
         updated_at: lastMessage.timestamp,
-        message_count: 0,
-        total_tokens: 0,
-        metadata: JSON.stringify({ ...metadata, source: 'session_file' })
+        turn_count: 0,
+        prompt_preview: promptPreview
       });

       for (const msg of messages) {
         if (!msg.message) continue;

         try {
-          const messageId = msg.uuid || `${sessionId}-${msg.timestamp}`;
           const content = this.extractTextContent(msg.message.content);
           const hash = this.generateHash(sessionId, msg.timestamp, content);

@@ -470,43 +466,44 @@ export class HistoryImporter {
             continue;
           }

-          // Calculate tokens
+          // Calculate total tokens
           const inputTokens = msg.message.usage?.input_tokens || 0;
           const outputTokens = msg.message.usage?.output_tokens || 0;
-          totalTokens += inputTokens + outputTokens;
+          const totalTokens = inputTokens + outputTokens;

-          // Insert message
-          upsertMessage.run({
-            id: messageId,
+          // Store content as JSON if complex, otherwise as text
+          const contentJson = typeof msg.message.content === 'object'
+            ? JSON.stringify(msg.message.content)
+            : null;
+
+          // Insert message (using MemoryStore-compatible fields)
+          const insertResult = upsertMessage.run({
             conversation_id: sessionId,
-            parent_id: msg.parentUuid || null,
             role: msg.message.role,
-            content,
+            content_text: content,
+            content_json: contentJson,
             timestamp: msg.timestamp,
-            model: msg.message.model || null,
-            input_tokens: inputTokens,
-            output_tokens: outputTokens,
-            cwd: msg.cwd || metadata.cwd || null,
-            git_branch: msg.gitBranch || metadata.gitBranch || null
+            token_count: totalTokens
           });

+          const messageId = insertResult.lastInsertRowid as number;
+
           // Extract and insert tool calls
           const toolCalls = this.extractToolCalls(msg.message.content);
           for (const tool of toolCalls) {
             insertToolCall.run({
-              id: tool.id || `${messageId}-${tool.name}`,
               message_id: messageId,
               tool_name: tool.name,
-              tool_input: JSON.stringify(tool.input),
-              tool_result: tool.result || null,
-              timestamp: msg.timestamp
+              tool_args: JSON.stringify(tool.input),
+              tool_output: tool.result || null,
+              status: 'success'
             });
           }

-          // Insert hash
+          // Insert hash (using actual message ID from insert)
           insertHash.run({
             hash,
-            message_id: messageId,
+            message_id: String(messageId),
             created_at: msg.timestamp
           });

@@ -520,13 +517,14 @@ export class HistoryImporter {
       // Update conversation with final counts
       upsertConversation.run({
         id: sessionId,
-        session_id: sessionId,
-        project_path: metadata.cwd || null,
+        source: 'session_file',
+        external_id: sessionId,
+        project_name: metadata.cwd || null,
+        git_branch: metadata.gitBranch || null,
         created_at: firstMessage.timestamp,
         updated_at: lastMessage.timestamp,
-        message_count: result.imported,
-        total_tokens: totalTokens,
-        metadata: JSON.stringify({ ...metadata, source: 'session_file' })
+        turn_count: result.imported,
+        prompt_preview: promptPreview
       });
     });

@@ -90,6 +90,8 @@ export interface ToolCall {
   id?: number;
   message_id: number;
   tool_name: string;
+  // NOTE: Naming inconsistency - using tool_args/tool_output vs tool_input/tool_result in HistoryImporter
+  // Kept for backward compatibility with existing databases
   tool_args?: string;
   tool_output?: string;
   status?: string;
@@ -114,8 +116,10 @@ export interface EntityWithAssociations extends Entity {
 export class MemoryStore {
   private db: Database.Database;
   private dbPath: string;
+  private projectPath: string;

   constructor(projectPath: string) {
+    this.projectPath = projectPath;
     // Use centralized storage path
     const paths = StoragePaths.project(projectPath);
     const memoryDir = paths.memory;
@@ -315,6 +319,22 @@ export class MemoryStore {
        `);
        console.log('[Memory Store] Migration complete: relative_path column added');
      }

+      // Add missing timestamp index for messages table (for time-based queries)
+      try {
+        const indexExists = this.db.prepare(`
+          SELECT name FROM sqlite_master
+          WHERE type='index' AND name='idx_messages_timestamp'
+        `).get();
+
+        if (!indexExists) {
+          console.log('[Memory Store] Adding missing timestamp index to messages table...');
+          this.db.exec(`CREATE INDEX IF NOT EXISTS idx_messages_timestamp ON messages(timestamp DESC);`);
+          console.log('[Memory Store] Migration complete: messages timestamp index added');
+        }
+      } catch (indexErr) {
+        console.warn('[Memory Store] Messages timestamp index creation warning:', (indexErr as Error).message);
+      }
    } catch (err) {
      console.error('[Memory Store] Migration error:', (err as Error).message);
      // Don't throw - allow the store to continue working with existing schema
@@ -597,13 +617,15 @@ export class MemoryStore {
   */
  saveConversation(conversation: Conversation): void {
    const stmt = this.db.prepare(`
-      INSERT INTO conversations (id, source, external_id, project_name, git_branch, created_at, updated_at, quality_score, turn_count, prompt_preview)
-      VALUES (@id, @source, @external_id, @project_name, @git_branch, @created_at, @updated_at, @quality_score, @turn_count, @prompt_preview)
+      INSERT INTO conversations (id, source, external_id, project_name, git_branch, created_at, updated_at, quality_score, turn_count, prompt_preview, project_root, relative_path)
+      VALUES (@id, @source, @external_id, @project_name, @git_branch, @created_at, @updated_at, @quality_score, @turn_count, @prompt_preview, @project_root, @relative_path)
      ON CONFLICT(id) DO UPDATE SET
        updated_at = @updated_at,
        quality_score = @quality_score,
        turn_count = @turn_count,
-        prompt_preview = @prompt_preview
+        prompt_preview = @prompt_preview,
+        project_root = @project_root,
+        relative_path = @relative_path
    `);

    stmt.run({
@@ -616,7 +638,9 @@ export class MemoryStore {
      updated_at: conversation.updated_at,
      quality_score: conversation.quality_score || null,
      turn_count: conversation.turn_count,
-      prompt_preview: conversation.prompt_preview || null
+      prompt_preview: conversation.prompt_preview || null,
+      project_root: this.projectPath,
+      relative_path: null // For future hierarchical tracking
    });
  }

@@ -737,15 +761,15 @@ export function getMemoryStore(projectPath: string): MemoryStore {
  * @param projectPath - Parent project path
  * @returns Aggregated statistics from all projects
  */
-export function getAggregatedStats(projectPath: string): {
+export async function getAggregatedStats(projectPath: string): Promise<{
   entities: number;
   prompts: number;
   conversations: number;
   total: number;
   projects: Array<{ path: string; stats: { entities: number; prompts: number; conversations: number } }>;
-} {
-  const { scanChildProjects } = require('../config/storage-paths.js');
-  const childProjects = scanChildProjects(projectPath);
+}> {
+  const { scanChildProjectsAsync } = await import('../config/storage-paths.js');
+  const childProjects = await scanChildProjectsAsync(projectPath);

   const projectStats: Array<{ path: string; stats: { entities: number; prompts: number; conversations: number } }> = [];
   let totalEntities = 0;
@@ -813,12 +837,12 @@ export function getAggregatedStats(projectPath: string): {
  * @param options - Query options
  * @returns Combined entities from all projects with source information
  */
-export function getAggregatedEntities(
+export async function getAggregatedEntities(
   projectPath: string,
   options: { type?: string; limit?: number; offset?: number } = {}
-): Array<HotEntity & { sourceProject?: string }> {
-  const { scanChildProjects } = require('../config/storage-paths.js');
-  const childProjects = scanChildProjects(projectPath);
+): Promise<Array<HotEntity & { sourceProject?: string }>> {
+  const { scanChildProjectsAsync } = await import('../config/storage-paths.js');
+  const childProjects = await scanChildProjectsAsync(projectPath);

   const limit = options.limit || 50;
   const offset = options.offset || 0;
@@ -892,12 +916,12 @@ export function getAggregatedEntities(
  * @param limit - Maximum number of prompts to return
  * @returns Combined prompts from all projects with source information
  */
-export function getAggregatedPrompts(
+export async function getAggregatedPrompts(
   projectPath: string,
   limit: number = 50
-): Array<PromptHistory & { sourceProject?: string }> {
-  const { scanChildProjects } = require('../config/storage-paths.js');
-  const childProjects = scanChildProjects(projectPath);
+): Promise<Array<PromptHistory & { sourceProject?: string }>> {
+  const { scanChildProjectsAsync } = await import('../config/storage-paths.js');
+  const childProjects = await scanChildProjectsAsync(projectPath);

   const allPrompts: Array<PromptHistory & { sourceProject?: string }> = [];

@@ -212,7 +212,7 @@ export async function handleCliRoutes(ctx: RouteContext): Promise<boolean> {
   const status = url.searchParams.get('status') || null;
   const category = url.searchParams.get('category') as 'user' | 'internal' | 'insight' | null;
   const search = url.searchParams.get('search') || null;
-  const recursive = url.searchParams.get('recursive') === 'true';
+  const recursive = url.searchParams.get('recursive') !== 'false';

   getExecutionHistoryAsync(projectPath, { limit, tool, status, category, search, recursive })
     .then(history => {

@@ -23,6 +23,37 @@ export interface RouteContext {
   broadcastToClients: (data: unknown) => void;
 }

+/**
+ * Strip ANSI color codes from string
+ * Rich library adds color codes even with --json flag
+ */
+function stripAnsiCodes(str: string): string {
+  // ANSI escape code pattern: \x1b[...m or \x1b]...
+  return str.replace(/\x1b\[[0-9;]*m/g, '')
+    .replace(/\x1b\][0-9;]*\x07/g, '')
+    .replace(/\x1b\][^\x07]*\x07/g, '');
+}
+
+/**
+ * Extract JSON from CLI output that may contain logging messages
+ * CodexLens CLI outputs logs like "INFO ..." before the JSON
+ * Also strips ANSI color codes that Rich library adds
+ */
+function extractJSON(output: string): any {
+  // Strip ANSI color codes first
+  const cleanOutput = stripAnsiCodes(output);
+
+  // Find the first { or [ character (start of JSON)
+  const jsonStart = cleanOutput.search(/[{\[]/);
+  if (jsonStart === -1) {
+    throw new Error('No JSON found in output');
+  }
+
+  // Extract everything from the first { or [ onwards
+  const jsonString = cleanOutput.substring(jsonStart);
+  return JSON.parse(jsonString);
+}
+
 /**
  * Handle CodexLens routes
  * @returns true if route was handled, false otherwise
@@ -83,23 +114,45 @@ export async function handleCodexLensRoutes(ctx: RouteContext): Promise<boolean>
     return true;
   }

-  // API: CodexLens Config - GET (Get current configuration)
+  // API: CodexLens Config - GET (Get current configuration with index count)
   if (pathname === '/api/codexlens/config' && req.method === 'GET') {
     try {
-      const result = await executeCodexLens(['config-show', '--json']);
-      if (result.success) {
+      // Fetch both config and status to merge index_count
+      const [configResult, statusResult] = await Promise.all([
+        executeCodexLens(['config', '--json']),
+        executeCodexLens(['status', '--json'])
+      ]);
+
+      let responseData = { index_dir: '~/.codexlens/indexes', index_count: 0 };
+
+      // Parse config (extract JSON from output that may contain log messages)
+      if (configResult.success) {
         try {
-          const config = JSON.parse(result.output);
-          res.writeHead(200, { 'Content-Type': 'application/json' });
-          res.end(JSON.stringify(config));
-        } catch {
-          res.writeHead(200, { 'Content-Type': 'application/json' });
-          res.end(JSON.stringify({ index_dir: '~/.codexlens/indexes', index_count: 0 }));
+          const config = extractJSON(configResult.output);
+          if (config.success && config.result) {
+            responseData.index_dir = config.result.index_root || responseData.index_dir;
+          }
+        } catch (e) {
+          console.error('[CodexLens] Failed to parse config:', e.message);
+          console.error('[CodexLens] Config output:', configResult.output.substring(0, 200));
         }
       } else {
         res.writeHead(200, { 'Content-Type': 'application/json' });
         res.end(JSON.stringify({ index_dir: '~/.codexlens/indexes', index_count: 0 }));
       }
+
+      // Parse status to get index_count (projects_count)
+      if (statusResult.success) {
+        try {
+          const status = extractJSON(statusResult.output);
+          if (status.success && status.result) {
+            responseData.index_count = status.result.projects_count || 0;
+          }
+        } catch (e) {
+          console.error('[CodexLens] Failed to parse status:', e.message);
+          console.error('[CodexLens] Status output:', statusResult.output.substring(0, 200));
+        }
+      }
+
+      res.writeHead(200, { 'Content-Type': 'application/json' });
+      res.end(JSON.stringify(responseData));
     } catch (err) {
       res.writeHead(500, { 'Content-Type': 'application/json' });
       res.end(JSON.stringify({ error: err.message }));
@@ -168,7 +221,7 @@ export async function handleCodexLensRoutes(ctx: RouteContext): Promise<boolean>
       const result = await executeCodexLens(['init', targetPath, '--json'], { cwd: targetPath });
       if (result.success) {
         try {
-          const parsed = JSON.parse(result.output);
+          const parsed = extractJSON(result.output);
           return { success: true, result: parsed };
         } catch {
           return { success: true, output: result.output };
@@ -237,7 +290,7 @@ export async function handleCodexLensRoutes(ctx: RouteContext): Promise<boolean>
       const result = await executeCodexLens(args, { cwd: targetPath, timeout: timeoutMs + 30000 });
       if (result.success) {
         try {
-          const parsed = JSON.parse(result.output);
+          const parsed = extractJSON(result.output);
           return { success: true, result: parsed };
         } catch {
           return { success: true, output: result.output };
@@ -253,10 +306,11 @@ export async function handleCodexLensRoutes(ctx: RouteContext): Promise<boolean>
   }


-  // API: CodexLens Search (FTS5 text search)
+  // API: CodexLens Search (FTS5 text search with mode support)
   if (pathname === '/api/codexlens/search') {
     const query = url.searchParams.get('query') || '';
     const limit = parseInt(url.searchParams.get('limit') || '20', 10);
+    const mode = url.searchParams.get('mode') || 'exact'; // exact, fuzzy, hybrid, vector
     const projectPath = url.searchParams.get('path') || initialPath;

     if (!query) {
@@ -266,13 +320,13 @@ export async function handleCodexLensRoutes(ctx: RouteContext): Promise<boolean>
     }

     try {
-      const args = ['search', query, '--path', projectPath, '--limit', limit.toString(), '--json'];
+      const args = ['search', query, '--path', projectPath, '--limit', limit.toString(), '--mode', mode, '--json'];

       const result = await executeCodexLens(args, { cwd: projectPath });

       if (result.success) {
         try {
-          const parsed = JSON.parse(result.output);
+          const parsed = extractJSON(result.output);
           res.writeHead(200, { 'Content-Type': 'application/json' });
           res.end(JSON.stringify({ success: true, ...parsed.result }));
         } catch {
@@ -290,10 +344,11 @@ export async function handleCodexLensRoutes(ctx: RouteContext): Promise<boolean>
     return true;
   }

-  // API: CodexLens Search Files Only (return file paths only)
+  // API: CodexLens Search Files Only (return file paths only, with mode support)
   if (pathname === '/api/codexlens/search_files') {
     const query = url.searchParams.get('query') || '';
     const limit = parseInt(url.searchParams.get('limit') || '20', 10);
+    const mode = url.searchParams.get('mode') || 'exact'; // exact, fuzzy, hybrid, vector
     const projectPath = url.searchParams.get('path') || initialPath;

     if (!query) {
@@ -303,13 +358,13 @@ export async function handleCodexLensRoutes(ctx: RouteContext): Promise<boolean>
     }

     try {
-      const args = ['search', query, '--path', projectPath, '--limit', limit.toString(), '--files-only', '--json'];
+      const args = ['search', query, '--path', projectPath, '--limit', limit.toString(), '--mode', mode, '--files-only', '--json'];

       const result = await executeCodexLens(args, { cwd: projectPath });

       if (result.success) {
         try {
-          const parsed = JSON.parse(result.output);
+          const parsed = extractJSON(result.output);
           res.writeHead(200, { 'Content-Type': 'application/json' });
           res.end(JSON.stringify({ success: true, ...parsed.result }));
         } catch {
@@ -327,6 +382,51 @@ export async function handleCodexLensRoutes(ctx: RouteContext): Promise<boolean>
     return true;
   }

+  // API: CodexLens Symbol Search (search for symbols by name)
+  if (pathname === '/api/codexlens/symbol') {
+    const query = url.searchParams.get('query') || '';
+    const file = url.searchParams.get('file');
+    const limit = parseInt(url.searchParams.get('limit') || '20', 10);
+    const projectPath = url.searchParams.get('path') || initialPath;
+
+    if (!query && !file) {
+      res.writeHead(400, { 'Content-Type': 'application/json' });
+      res.end(JSON.stringify({ success: false, error: 'Either query or file parameter is required' }));
+      return true;
+    }
+
+    try {
+      let args;
+      if (file) {
+        // Get symbols from a specific file
+        args = ['symbol', '--file', file, '--json'];
+      } else {
+        // Search for symbols by name
+        args = ['symbol', query, '--path', projectPath, '--limit', limit.toString(), '--json'];
+      }
+
+      const result = await executeCodexLens(args, { cwd: projectPath });
+
+      if (result.success) {
+        try {
+          const parsed = extractJSON(result.output);
+          res.writeHead(200, { 'Content-Type': 'application/json' });
+          res.end(JSON.stringify({ success: true, ...parsed.result }));
+        } catch {
+          res.writeHead(200, { 'Content-Type': 'application/json' });
+          res.end(JSON.stringify({ success: true, symbols: [], output: result.output }));
+        }
+      } else {
+        res.writeHead(500, { 'Content-Type': 'application/json' });
+        res.end(JSON.stringify({ success: false, error: result.error }));
+      }
+    } catch (err) {
+      res.writeHead(500, { 'Content-Type': 'application/json' });
+      res.end(JSON.stringify({ success: false, error: err.message }));
+    }
+    return true;
+  }
+

   // API: CodexLens Semantic Search Install (fastembed, ONNX-based, ~200MB)
   if (pathname === '/api/codexlens/semantic/install' && req.method === 'POST') {
@@ -350,5 +450,117 @@ export async function handleCodexLensRoutes(ctx: RouteContext): Promise<boolean>
     return true;
   }

+  // API: CodexLens Model List (list available embedding models)
+  if (pathname === '/api/codexlens/models' && req.method === 'GET') {
+    try {
+      const result = await executeCodexLens(['model-list', '--json']);
+      if (result.success) {
+        try {
+          const parsed = extractJSON(result.output);
+          res.writeHead(200, { 'Content-Type': 'application/json' });
+          res.end(JSON.stringify(parsed));
+        } catch {
+          res.writeHead(200, { 'Content-Type': 'application/json' });
+          res.end(JSON.stringify({ success: true, result: { models: [] }, output: result.output }));
+        }
+      } else {
+        res.writeHead(500, { 'Content-Type': 'application/json' });
+        res.end(JSON.stringify({ success: false, error: result.error }));
+      }
+    } catch (err) {
+      res.writeHead(500, { 'Content-Type': 'application/json' });
+      res.end(JSON.stringify({ success: false, error: err.message }));
+    }
+    return true;
+  }
+
+  // API: CodexLens Model Download (download embedding model by profile)
+  if (pathname === '/api/codexlens/models/download' && req.method === 'POST') {
+    handlePostRequest(req, res, async (body) => {
+      const { profile } = body;
+
+      if (!profile) {
+        return { success: false, error: 'profile is required', status: 400 };
+      }
+
+      try {
+        const result = await executeCodexLens(['model-download', profile, '--json'], { timeout: 600000 }); // 10 min for download
+        if (result.success) {
+          try {
+            const parsed = extractJSON(result.output);
+            return { success: true, ...parsed };
+          } catch {
+            return { success: true, output: result.output };
+          }
+        } else {
+          return { success: false, error: result.error, status: 500 };
+        }
+      } catch (err) {
+        return { success: false, error: err.message, status: 500 };
+      }
+    });
+    return true;
+  }
+
+  // API: CodexLens Model Delete (delete embedding model by profile)
+  if (pathname === '/api/codexlens/models/delete' && req.method === 'POST') {
+    handlePostRequest(req, res, async (body) => {
+      const { profile } = body;
+
+      if (!profile) {
+        return { success: false, error: 'profile is required', status: 400 };
+      }
+
+      try {
+        const result = await executeCodexLens(['model-delete', profile, '--json']);
+        if (result.success) {
+          try {
+            const parsed = extractJSON(result.output);
+            return { success: true, ...parsed };
+          } catch {
+            return { success: true, output: result.output };
+          }
+        } else {
+          return { success: false, error: result.error, status: 500 };
+        }
+      } catch (err) {
+        return { success: false, error: err.message, status: 500 };
+      }
+    });
+    return true;
+  }
+
+  // API: CodexLens Model Info (get model info by profile)
+  if (pathname === '/api/codexlens/models/info' && req.method === 'GET') {
+    const profile = url.searchParams.get('profile');
+
+    if (!profile) {
+      res.writeHead(400, { 'Content-Type': 'application/json' });
+      res.end(JSON.stringify({ success: false, error: 'profile parameter is required' }));
+      return true;
+    }
+
+    try {
+      const result = await executeCodexLens(['model-info', profile, '--json']);
+      if (result.success) {
+        try {
+          const parsed = extractJSON(result.output);
+          res.writeHead(200, { 'Content-Type': 'application/json' });
+          res.end(JSON.stringify(parsed));
+        } catch {
+          res.writeHead(200, { 'Content-Type': 'application/json' });
+          res.end(JSON.stringify({ success: false, error: 'Failed to parse response' }));
+        }
+      } else {
+        res.writeHead(500, { 'Content-Type': 'application/json' });
+        res.end(JSON.stringify({ success: false, error: result.error }));
+      }
+    } catch (err) {
+      res.writeHead(500, { 'Content-Type': 'application/json' });
+      res.end(JSON.stringify({ success: false, error: err.message }));
+    }
+    return true;
+  }
+
   return false;
 }

@@ -20,9 +20,7 @@ Query all symbols from the CodexLens SQLite database and return them as graph nodes
       "name": "functionName",
       "type": "FUNCTION",
       "file": "src/file.ts",
-      "line": 10,
-      "docstring": "function_type",
-      "tokenCount": 45
+      "line": 10
     }
   ]
 }
@@ -98,7 +96,7 @@ Maps source code paths to CodexLens index database paths following the storage structure
 ### Database Schema
 Queries two main tables:
 1. **symbols** - Code symbol definitions
-   - `id`, `file_id`, `name`, `kind`, `start_line`, `end_line`, `token_count`, `symbol_type`
+   - `id`, `file_id`, `name`, `kind`, `start_line`, `end_line`
 2. **code_relationships** - Inter-symbol dependencies
    - `id`, `source_symbol_id`, `target_qualified_name`, `relationship_type`, `source_line`, `target_file`

@@ -5,7 +5,7 @@
 import type { IncomingMessage, ServerResponse } from 'http';
 import { homedir } from 'os';
 import { join, resolve, normalize } from 'path';
-import { existsSync } from 'fs';
+import { existsSync, readdirSync } from 'fs';
 import Database from 'better-sqlite3';

 export interface RouteContext {
@@ -63,8 +63,6 @@ interface GraphNode {
   type: string;
   file: string;
   line: number;
-  docstring?: string;
-  tokenCount?: number;
 }

 interface GraphEdge {
@@ -108,6 +106,36 @@ function validateProjectPath(projectPath: string, initialPath: string): string | null {
   return normalized;
 }

+/**
+ * Find all _index.db files recursively in a directory
+ * @param dir Directory to search
+ * @returns Array of absolute paths to _index.db files
+ */
+function findAllIndexDbs(dir: string): string[] {
+  const dbs: string[] = [];
+
+  function traverse(currentDir: string): void {
+    const dbPath = join(currentDir, '_index.db');
+    if (existsSync(dbPath)) {
+      dbs.push(dbPath);
+    }
+
+    try {
+      const entries = readdirSync(currentDir, { withFileTypes: true });
+      for (const entry of entries) {
+        if (entry.isDirectory()) {
+          traverse(join(currentDir, entry.name));
+        }
+      }
+    } catch {
+      // Silently skip directories we can't read
+    }
+  }
+
+  traverse(dir);
+  return dbs;
+}
+
 /**
  * Map codex-lens symbol kinds to graph node types
  */
@@ -138,93 +166,117 @@ function mapRelationType(relType: string): string {
 }

 /**
- * Query symbols from codex-lens database
+ * Query symbols from all codex-lens databases (hierarchical structure)
  */
 async function querySymbols(projectPath: string): Promise<GraphNode[]> {
   const mapper = new PathMapper();
-  const dbPath = mapper.sourceToIndexDb(projectPath);
+  const rootDbPath = mapper.sourceToIndexDb(projectPath);
+  const indexRoot = rootDbPath.replace(/[\\/]_index\.db$/, '');

-  if (!existsSync(dbPath)) {
+  if (!existsSync(indexRoot)) {
     return [];
   }

-  try {
-    const db = Database(dbPath, { readonly: true });
+  // Find all _index.db files recursively
+  const dbPaths = findAllIndexDbs(indexRoot);

-    const rows = db.prepare(`
-      SELECT
-        s.id,
-        s.name,
-        s.kind,
-        s.start_line,
-        s.token_count,
-        s.symbol_type,
-        f.path as file
-      FROM symbols s
-      JOIN files f ON s.file_id = f.id
-      ORDER BY f.path, s.start_line
-    `).all();
-
-    db.close();
-
-    return rows.map((row: any) => ({
-      id: `${row.file}:${row.name}:${row.start_line}`,
-      name: row.name,
-      type: mapSymbolKind(row.kind),
-      file: row.file,
-      line: row.start_line,
-      docstring: row.symbol_type || undefined,
-      tokenCount: row.token_count || undefined,
-    }));
-  } catch (err) {
-    const message = err instanceof Error ? err.message : String(err);
-    console.error(`[Graph] Failed to query symbols: ${message}`);
+  if (dbPaths.length === 0) {
     return [];
   }
+
+  const allNodes: GraphNode[] = [];
+
+  for (const dbPath of dbPaths) {
+    try {
+      const db = Database(dbPath, { readonly: true });
+
+      const rows = db.prepare(`
+        SELECT
+          s.id,
+          s.name,
+          s.kind,
+          s.start_line,
+          f.full_path as file
+        FROM symbols s
+        JOIN files f ON s.file_id = f.id
+        ORDER BY f.full_path, s.start_line
+      `).all();
+
+      db.close();
+
+      allNodes.push(...rows.map((row: any) => ({
+        id: `${row.file}:${row.name}:${row.start_line}`,
+        name: row.name,
+        type: mapSymbolKind(row.kind),
+        file: row.file,
+        line: row.start_line,
+      })));
+    } catch (err) {
+      const message = err instanceof Error ? err.message : String(err);
+      console.error(`[Graph] Failed to query symbols from ${dbPath}: ${message}`);
+      // Continue with other databases even if one fails
+    }
+  }
+
+  return allNodes;
 }

 /**
- * Query code relationships from codex-lens database
+ * Query code relationships from all codex-lens databases (hierarchical structure)
  */
 async function queryRelationships(projectPath: string): Promise<GraphEdge[]> {
   const mapper = new PathMapper();
-  const dbPath = mapper.sourceToIndexDb(projectPath);
+  const rootDbPath = mapper.sourceToIndexDb(projectPath);
+  const indexRoot = rootDbPath.replace(/[\\/]_index\.db$/, '');

-  if (!existsSync(dbPath)) {
+  if (!existsSync(indexRoot)) {
     return [];
   }

-  try {
-    const db = Database(dbPath, { readonly: true });
+  // Find all _index.db files recursively
+  const dbPaths = findAllIndexDbs(indexRoot);

-    const rows = db.prepare(`
-      SELECT
-        s.name as source_name,
-        s.start_line as source_line,
-        f.path as source_file,
-        r.target_qualified_name,
-        r.relationship_type,
-        r.target_file
-      FROM code_relationships r
-      JOIN symbols s ON r.source_symbol_id = s.id
-      JOIN files f ON s.file_id = f.id
-      ORDER BY f.path, s.start_line
-    `).all();
-
-    db.close();
-
-    return rows.map((row: any) => ({
-      source: `${row.source_file}:${row.source_name}:${row.source_line}`,
-      target: row.target_qualified_name,
-      type: mapRelationType(row.relationship_type),
-      sourceLine: row.source_line,
-      sourceFile: row.source_file,
-    }));
-  } catch (err) {
-    const message = err instanceof Error ? err.message : String(err);
-    console.error(`[Graph] Failed to query relationships: ${message}`);
+  if (dbPaths.length === 0) {
     return [];
   }
+
+  const allEdges: GraphEdge[] = [];
+
+  for (const dbPath of dbPaths) {
+    try {
+      const db = Database(dbPath, { readonly: true });
+
+      const rows = db.prepare(`
+        SELECT
+          s.name as source_name,
+          s.start_line as source_line,
+          f.full_path as source_file,
+          r.target_qualified_name,
+          r.relationship_type,
+          r.target_file
+        FROM code_relationships r
+        JOIN symbols s ON r.source_symbol_id = s.id
+        JOIN files f ON s.file_id = f.id
+        ORDER BY f.full_path, s.start_line
+      `).all();
+
+      db.close();
+
+      allEdges.push(...rows.map((row: any) => ({
+        source: `${row.source_file}:${row.source_name}:${row.source_line}`,
+        target: row.target_qualified_name,
+        type: mapRelationType(row.relationship_type),
+        sourceLine: row.source_line,
+        sourceFile: row.source_file,
+      })));
+    } catch (err) {
+      const message = err instanceof Error ? err.message : String(err);
+      console.error(`[Graph] Failed to query relationships from ${dbPath}: ${message}`);
+      // Continue with other databases even if one fails
+    }
+  }
+
+  return allEdges;
 }

 /**
@@ -292,7 +344,7 @@ async function analyzeImpact(projectPath: string, symbolId: string): Promise<Imp
     const rows = db.prepare(`
       SELECT DISTINCT
         s.name as dependent_name,
-        f.path as dependent_file,
+        f.full_path as dependent_file,
         s.start_line as dependent_line
       FROM code_relationships r
       JOIN symbols s ON r.source_symbol_id = s.id
@@ -330,6 +382,8 @@ export async function handleGraphRoutes(ctx: RouteContext): Promise<boolean> {
   if (pathname === '/api/graph/nodes') {
     const rawPath = url.searchParams.get('path') || initialPath;
     const projectPath = validateProjectPath(rawPath, initialPath);
+    const limitStr = url.searchParams.get('limit') || '1000';
+    const limit = Math.min(parseInt(limitStr, 10) || 1000, 5000); // Max 5000 nodes

     if (!projectPath) {
       res.writeHead(400, { 'Content-Type': 'application/json' });
@@ -338,9 +392,15 @@ export async function handleGraphRoutes(ctx: RouteContext): Promise<boolean> {
     }

     try {
-      const nodes = await querySymbols(projectPath);
+      const allNodes = await querySymbols(projectPath);
+      const nodes = allNodes.slice(0, limit);
       res.writeHead(200, { 'Content-Type': 'application/json' });
-      res.end(JSON.stringify({ nodes }));
+      res.end(JSON.stringify({
+        nodes,
+        total: allNodes.length,
+        limit,
+        hasMore: allNodes.length > limit
+      }));
     } catch (err) {
       console.error(`[Graph] Error fetching nodes:`, err);
       res.writeHead(500, { 'Content-Type': 'application/json' });
@@ -353,6 +413,8 @@ export async function handleGraphRoutes(ctx: RouteContext): Promise<boolean> {
   if (pathname === '/api/graph/edges') {
     const rawPath = url.searchParams.get('path') || initialPath;
     const projectPath = validateProjectPath(rawPath, initialPath);
+    const limitStr = url.searchParams.get('limit') || '2000';
+    const limit = Math.min(parseInt(limitStr, 10) || 2000, 10000); // Max 10000 edges

     if (!projectPath) {
       res.writeHead(400, { 'Content-Type': 'application/json' });
@@ -361,9 +423,15 @@ export async function handleGraphRoutes(ctx: RouteContext): Promise<boolean> {
     }

     try {
-      const edges = await queryRelationships(projectPath);
+      const allEdges = await queryRelationships(projectPath);
+      const edges = allEdges.slice(0, limit);
       res.writeHead(200, { 'Content-Type': 'application/json' });
-      res.end(JSON.stringify({ edges }));
+      res.end(JSON.stringify({
+        edges,
+        total: allEdges.length,
+        limit,
+        hasMore: allEdges.length > limit
+      }));
     } catch (err) {
       console.error(`[Graph] Error fetching edges:`, err);
       res.writeHead(500, { 'Content-Type': 'application/json' });

@@ -1,4 +1,3 @@
-// @ts-nocheck
 import http from 'http';
 import { URL } from 'url';
 import { readFileSync, writeFileSync, existsSync, mkdirSync, statSync, unlinkSync } from 'fs';
@@ -222,7 +221,7 @@ export async function handleMemoryRoutes(ctx: RouteContext): Promise<boolean> {
   const projectPath = url.searchParams.get('path') || initialPath;
   const limit = parseInt(url.searchParams.get('limit') || '50', 10);
   const search = url.searchParams.get('search') || null;
-  const recursive = url.searchParams.get('recursive') === 'true';
+  const recursive = url.searchParams.get('recursive') !== 'false';

   try {
     let prompts;
@@ -230,7 +229,7 @@ export async function handleMemoryRoutes(ctx: RouteContext): Promise<boolean> {
     // Recursive mode: aggregate prompts from parent and child projects
     if (recursive && !search) {
       const { getAggregatedPrompts } = await import('../memory-store.js');
-      prompts = getAggregatedPrompts(projectPath, limit);
+      prompts = await getAggregatedPrompts(projectPath, limit);
     } else {
       // Non-recursive mode or search mode: query only current project
       const memoryStore = getMemoryStore(projectPath);
@@ -390,11 +389,11 @@ Return ONLY valid JSON in this exact format (no markdown, no code blocks, just p
       mode: 'analysis',
       timeout: 120000,
       cd: projectPath,
-      category: 'insights'
+      category: 'insight'
     });

     // Try to parse JSON from response
-    let insights = { patterns: [], suggestions: [] };
+    let insights: { patterns: any[]; suggestions: any[] } = { patterns: [], suggestions: [] };
     if (result.stdout) {
       let outputText = result.stdout;

@@ -515,13 +514,13 @@ Return ONLY valid JSON in this exact format (no markdown, no code blocks, just p
   const projectPath = url.searchParams.get('path') || initialPath;
   const filter = url.searchParams.get('filter') || 'all'; // today, week, all
   const limit = parseInt(url.searchParams.get('limit') || '10', 10);
-  const recursive = url.searchParams.get('recursive') === 'true';
+  const recursive = url.searchParams.get('recursive') !== 'false';

   try {
     // If requesting aggregated stats, use the aggregated function
     if (url.searchParams.has('aggregated') || recursive) {
       const { getAggregatedStats } = await import('../memory-store.js');
-      const aggregatedStats = getAggregatedStats(projectPath);
+      const aggregatedStats = await getAggregatedStats(projectPath);

       res.writeHead(200, { 'Content-Type': 'application/json' });
       res.end(JSON.stringify({

ccw/src/core/routes/status-routes.ts (new file, 57 lines)
@@ -0,0 +1,57 @@
+// @ts-nocheck
+/**
+ * Status Routes Module
+ * Aggregated status endpoint for faster dashboard loading
+ */
+import type { IncomingMessage, ServerResponse } from 'http';
+import { getCliToolsStatus } from '../../tools/cli-executor.js';
+import { checkVenvStatus, checkSemanticStatus } from '../../tools/codex-lens.js';
+
+export interface RouteContext {
+  pathname: string;
+  url: URL;
+  req: IncomingMessage;
+  res: ServerResponse;
+  initialPath: string;
+  handlePostRequest: (req: IncomingMessage, res: ServerResponse, handler: (body: unknown) => Promise<any>) => void;
+  broadcastToClients: (data: unknown) => void;
+}
+
+/**
+ * Handle status routes
+ * @returns true if route was handled, false otherwise
+ */
+export async function handleStatusRoutes(ctx: RouteContext): Promise<boolean> {
+  const { pathname, res } = ctx;
+
+  // API: Aggregated Status (all statuses in one call)
+  if (pathname === '/api/status/all') {
+    try {
+      // Execute all status checks in parallel
+      const [cliStatus, codexLensStatus, semanticStatus] = await Promise.all([
+        getCliToolsStatus(),
+        checkVenvStatus(),
+        // Always check semantic status (will return available: false if CodexLens not ready)
+        checkSemanticStatus().catch(() => ({ available: false, backend: null }))
+      ]);
+
+      const response = {
+        cli: cliStatus,
+        codexLens: codexLensStatus,
+        semantic: semanticStatus,
+        timestamp: new Date().toISOString()
+      };
+
+      res.writeHead(200, { 'Content-Type': 'application/json' });
+      res.end(JSON.stringify(response));
+      return true;
+    } catch (error) {
+      console.error('[Status Routes] Error fetching aggregated status:', error);
+      res.writeHead(500, { 'Content-Type': 'application/json' });
+      res.end(JSON.stringify({ error: (error as Error).message }));
+      return true;
+    }
+  }
+
+  return false;
+}
@@ -6,6 +6,7 @@ import { join } from 'path';
 import { resolvePath, getRecentPaths, normalizePathForDisplay } from '../utils/path-resolver.js';

 // Import route handlers
+import { handleStatusRoutes } from './routes/status-routes.js';
 import { handleCliRoutes } from './routes/cli-routes.js';
 import { handleMemoryRoutes } from './routes/memory-routes.js';
 import { handleMcpRoutes } from './routes/mcp-routes.js';
@@ -243,6 +244,11 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Server> {
   // Try each route handler in order
   // Order matters: more specific routes should come before general ones

+  // Status routes (/api/status/*) - Aggregated endpoint for faster loading
+  if (pathname.startsWith('/api/status/')) {
+    if (await handleStatusRoutes(routeContext)) return;
+  }
+
   // CLI routes (/api/cli/*)
   if (pathname.startsWith('/api/cli/')) {
     if (await handleCliRoutes(routeContext)) return;