Mirror of https://github.com/catlog22/Claude-Code-Workflow.git (synced 2026-02-11 02:33:51 +08:00)

feat: Implement resume strategy engine and session content parser

- Added `resume-strategy.ts` to determine the optimal resume approach, including native, prompt-concatenation, and hybrid modes.
- Introduced a `determineResumeStrategy` function to evaluate the various resume scenarios.
- Created utility functions for building context prefixes and formatting output as plain text, YAML, or JSON.
- Added `session-content-parser.ts` to parse native CLI tool session files, supporting Gemini/Qwen JSON and Codex JSONL formats.
- Implemented parsing logic for the different session formats, including error handling for invalid lines.
- Provided functions to format conversations and extract user-assistant pairs from parsed sessions.
ccw/src/core/history-importer.ts (new file, 627 lines)
@@ -0,0 +1,627 @@
/**
 * History Importer - Import Claude Code history into memory store
 * Supports global history.jsonl and project session JSONL files
 *
 * Usage:
 * ```typescript
 * const importer = new HistoryImporter('path/to/database.db');
 *
 * // Import global history (incremental)
 * const globalResult = await importer.importGlobalHistory();
 * console.log(`Imported ${globalResult.imported} entries`);
 *
 * // Import all sessions for a project
 * const projectResult = await importer.importProjectSessions('D--Claude-dms3');
 * console.log(`Imported ${projectResult.imported} messages from project sessions`);
 *
 * // Import specific session
 * const sessionResult = await importer.importSession('path/to/session.jsonl');
 *
 * // Get import status
 * const status = importer.getImportStatus();
 * console.log(`Total imported: ${status.totalImported}`);
 *
 * importer.close();
 * ```
 */

import { createReadStream, existsSync, readdirSync, statSync } from 'fs';
import { createInterface } from 'readline';
import { join, basename } from 'path';
import { createHash } from 'crypto';
import Database from 'better-sqlite3';

// Type definitions
interface GlobalHistoryEntry {
  display: string;
  pastedContents: object;
  timestamp: number;
  project: string;
  sessionId: string;
}

interface SessionEntry {
  type: 'user' | 'assistant' | 'file-history-snapshot';
  message?: {
    role: 'user' | 'assistant';
    content: string | ContentBlock[];
    model?: string;
    usage?: {
      input_tokens: number;
      output_tokens: number;
      cache_creation_input_tokens?: number;
      cache_read_input_tokens?: number;
    };
  };
  sessionId: string;
  timestamp: string;
  cwd?: string;
  gitBranch?: string;
  todos?: any[];
  uuid?: string;
  parentUuid?: string;
}

interface ContentBlock {
  type: 'text' | 'thinking' | 'tool_use' | 'tool_result';
  text?: string;
  thinking?: string;
  name?: string;
  input?: object;
  content?: string;
  id?: string;
}

export interface ImportResult {
  imported: number;
  skipped: number;
  errors: number;
}

export interface ImportStatus {
  lastGlobalImport?: string;
  lastSessionImport?: string;
  totalImported: number;
  sessions: Map<string, { imported: number; lastUpdate: string }>;
}

/**
 * History Importer for Claude Code
 */
export class HistoryImporter {
  private db: Database.Database;
  private status: ImportStatus;

  constructor(dbPath: string) {
    this.db = new Database(dbPath);
    this.db.pragma('journal_mode = WAL');
    this.status = {
      totalImported: 0,
      sessions: new Map()
    };
    this.initSchema();
  }

  /**
   * Initialize database schema for conversation history
   */
  private initSchema(): void {
    this.db.exec(`
      -- Conversations table
      CREATE TABLE IF NOT EXISTS conversations (
        id TEXT PRIMARY KEY,
        session_id TEXT NOT NULL,
        project_path TEXT,
        created_at TEXT NOT NULL,
        updated_at TEXT NOT NULL,
        message_count INTEGER DEFAULT 0,
        total_tokens INTEGER DEFAULT 0,
        metadata TEXT
      );

      -- Messages table
      CREATE TABLE IF NOT EXISTS messages (
        id TEXT PRIMARY KEY,
        conversation_id TEXT NOT NULL,
        parent_id TEXT,
        role TEXT NOT NULL,
        content TEXT NOT NULL,
        timestamp TEXT NOT NULL,
        model TEXT,
        input_tokens INTEGER DEFAULT 0,
        output_tokens INTEGER DEFAULT 0,
        cwd TEXT,
        git_branch TEXT,
        FOREIGN KEY (conversation_id) REFERENCES conversations(id) ON DELETE CASCADE
      );

      -- Tool calls table
      CREATE TABLE IF NOT EXISTS tool_calls (
        id TEXT PRIMARY KEY,
        message_id TEXT NOT NULL,
        tool_name TEXT NOT NULL,
        tool_input TEXT,
        tool_result TEXT,
        timestamp TEXT NOT NULL,
        FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
      );

      -- Import tracking table
      CREATE TABLE IF NOT EXISTS import_metadata (
        key TEXT PRIMARY KEY,
        value TEXT,
        updated_at TEXT
      );

      -- Deduplication table (hash-based)
      CREATE TABLE IF NOT EXISTS message_hashes (
        hash TEXT PRIMARY KEY,
        message_id TEXT NOT NULL,
        created_at TEXT NOT NULL
      );

      -- Indexes
      CREATE INDEX IF NOT EXISTS idx_conversations_session ON conversations(session_id);
      CREATE INDEX IF NOT EXISTS idx_conversations_project ON conversations(project_path);
      CREATE INDEX IF NOT EXISTS idx_messages_conversation ON messages(conversation_id);
      CREATE INDEX IF NOT EXISTS idx_messages_timestamp ON messages(timestamp DESC);
      CREATE INDEX IF NOT EXISTS idx_tool_calls_message ON tool_calls(message_id);
      CREATE INDEX IF NOT EXISTS idx_tool_calls_name ON tool_calls(tool_name);
    `);
  }

  /**
   * Import from global history.jsonl (incremental)
   */
  async importGlobalHistory(historyPath?: string): Promise<ImportResult> {
    const path = historyPath || join(process.env.USERPROFILE || process.env.HOME || '', '.claude', 'history.jsonl');

    if (!existsSync(path)) {
      return { imported: 0, skipped: 0, errors: 0 };
    }

    const result: ImportResult = { imported: 0, skipped: 0, errors: 0 };
    const lastImportTime = this.getLastImportTime('global_history');

    const fileStream = createReadStream(path, { encoding: 'utf8' });
    const rl = createInterface({ input: fileStream, crlfDelay: Infinity });

    const batch: GlobalHistoryEntry[] = [];
    const BATCH_SIZE = 100;

    for await (const line of rl) {
      if (!line.trim()) continue;

      try {
        const entry: GlobalHistoryEntry = JSON.parse(line);

        // Skip if already imported
        if (lastImportTime && entry.timestamp <= new Date(lastImportTime).getTime()) {
          result.skipped++;
          continue;
        }

        batch.push(entry);

        if (batch.length >= BATCH_SIZE) {
          const batchResult = this.insertGlobalHistoryBatch(batch);
          result.imported += batchResult.imported;
          result.skipped += batchResult.skipped;
          result.errors += batchResult.errors;
          batch.length = 0;
        }
      } catch (err) {
        result.errors++;
        console.error(`Failed to parse line: ${(err as Error).message}`);
      }
    }

    // Process remaining batch
    if (batch.length > 0) {
      const batchResult = this.insertGlobalHistoryBatch(batch);
      result.imported += batchResult.imported;
      result.skipped += batchResult.skipped;
      result.errors += batchResult.errors;
    }

    if (result.imported > 0) {
      this.updateLastImportTime('global_history');
    }

    this.status.lastGlobalImport = new Date().toISOString();
    this.status.totalImported += result.imported;

    return result;
  }

  /**
   * Import full session from projects folder
   */
  async importSession(sessionFilePath: string): Promise<ImportResult> {
    if (!existsSync(sessionFilePath)) {
      return { imported: 0, skipped: 0, errors: 0 };
    }

    const result: ImportResult = { imported: 0, skipped: 0, errors: 0 };
    const sessionId = basename(sessionFilePath, '.jsonl');

    const fileStream = createReadStream(sessionFilePath, { encoding: 'utf8' });
    const rl = createInterface({ input: fileStream, crlfDelay: Infinity });

    const messages: SessionEntry[] = [];
    let conversationMetadata: any = {};

    for await (const line of rl) {
      if (!line.trim()) continue;

      try {
        const entry: SessionEntry = JSON.parse(line);

        if (entry.type === 'user' || entry.type === 'assistant') {
          messages.push(entry);

          // Extract metadata from first message
          if (messages.length === 1) {
            conversationMetadata = {
              sessionId: entry.sessionId,
              cwd: entry.cwd,
              gitBranch: entry.gitBranch
            };
          }
        }
      } catch (err) {
        result.errors++;
        console.error(`Failed to parse session line: ${(err as Error).message}`);
      }
    }

    if (messages.length > 0) {
      const importResult = this.insertSessionMessages(sessionId, messages, conversationMetadata);
      result.imported = importResult.imported;
      result.skipped = importResult.skipped;
      result.errors += importResult.errors;
    }

    this.status.lastSessionImport = new Date().toISOString();
    this.status.totalImported += result.imported;
    this.status.sessions.set(sessionId, {
      imported: result.imported,
      lastUpdate: new Date().toISOString()
    });

    return result;
  }

  /**
   * Scan and import all sessions for a project
   */
  async importProjectSessions(projectName: string): Promise<ImportResult> {
    const projectsDir = join(process.env.USERPROFILE || process.env.HOME || '', '.claude', 'projects');
    const projectDir = join(projectsDir, projectName);

    if (!existsSync(projectDir)) {
      return { imported: 0, skipped: 0, errors: 0 };
    }

    const result: ImportResult = { imported: 0, skipped: 0, errors: 0 };
    const sessionFiles = readdirSync(projectDir).filter(f => f.endsWith('.jsonl'));

    for (const sessionFile of sessionFiles) {
      const sessionPath = join(projectDir, sessionFile);
      const sessionResult = await this.importSession(sessionPath);

      result.imported += sessionResult.imported;
      result.skipped += sessionResult.skipped;
      result.errors += sessionResult.errors;
    }

    return result;
  }

  /**
   * Get import status
   */
  getImportStatus(): ImportStatus {
    return { ...this.status };
  }

  /**
   * Insert global history batch
   */
  private insertGlobalHistoryBatch(entries: GlobalHistoryEntry[]): ImportResult {
    const result: ImportResult = { imported: 0, skipped: 0, errors: 0 };

    const upsertConversation = this.db.prepare(`
      INSERT INTO conversations (id, session_id, project_path, created_at, updated_at, message_count, metadata)
      VALUES (@id, @session_id, @project_path, @created_at, @updated_at, 1, @metadata)
      ON CONFLICT(id) DO UPDATE SET
        updated_at = @updated_at,
        message_count = message_count + 1
    `);

    const upsertMessage = this.db.prepare(`
      INSERT INTO messages (id, conversation_id, role, content, timestamp, cwd)
      VALUES (@id, @conversation_id, 'user', @content, @timestamp, @cwd)
      ON CONFLICT(id) DO NOTHING
    `);

    const insertHash = this.db.prepare(`
      INSERT OR IGNORE INTO message_hashes (hash, message_id, created_at)
      VALUES (@hash, @message_id, @created_at)
    `);

    const transaction = this.db.transaction(() => {
      for (const entry of entries) {
        try {
          const timestamp = new Date(entry.timestamp).toISOString();
          const messageId = `${entry.sessionId}-${entry.timestamp}`;
          const hash = this.generateHash(entry.sessionId, timestamp, entry.display);

          // Check if hash exists
          const existing = this.db.prepare('SELECT message_id FROM message_hashes WHERE hash = ?').get(hash);
          if (existing) {
            result.skipped++;
            continue;
          }

          // Insert conversation
          upsertConversation.run({
            id: entry.sessionId,
            session_id: entry.sessionId,
            project_path: entry.project,
            created_at: timestamp,
            updated_at: timestamp,
            metadata: JSON.stringify({ source: 'global_history' })
          });

          // Insert message
          upsertMessage.run({
            id: messageId,
            conversation_id: entry.sessionId,
            content: entry.display,
            timestamp,
            cwd: entry.project
          });

          // Insert hash
          insertHash.run({
            hash,
            message_id: messageId,
            created_at: timestamp
          });

          result.imported++;
        } catch (err) {
          result.errors++;
          console.error(`Failed to insert entry: ${(err as Error).message}`);
        }
      }
    });

    transaction();
    return result;
  }

  /**
   * Insert session messages
   */
  private insertSessionMessages(
    sessionId: string,
    messages: SessionEntry[],
    metadata: any
  ): ImportResult {
    const result: ImportResult = { imported: 0, skipped: 0, errors: 0 };

    const upsertConversation = this.db.prepare(`
      INSERT INTO conversations (id, session_id, project_path, created_at, updated_at, message_count, total_tokens, metadata)
      VALUES (@id, @session_id, @project_path, @created_at, @updated_at, @message_count, @total_tokens, @metadata)
      ON CONFLICT(id) DO UPDATE SET
        updated_at = @updated_at,
        message_count = @message_count,
        total_tokens = @total_tokens
    `);

    const upsertMessage = this.db.prepare(`
      INSERT INTO messages (id, conversation_id, parent_id, role, content, timestamp, model, input_tokens, output_tokens, cwd, git_branch)
      VALUES (@id, @conversation_id, @parent_id, @role, @content, @timestamp, @model, @input_tokens, @output_tokens, @cwd, @git_branch)
      ON CONFLICT(id) DO NOTHING
    `);

    const insertToolCall = this.db.prepare(`
      INSERT INTO tool_calls (id, message_id, tool_name, tool_input, tool_result, timestamp)
      VALUES (@id, @message_id, @tool_name, @tool_input, @tool_result, @timestamp)
      ON CONFLICT(id) DO NOTHING
    `);

    const insertHash = this.db.prepare(`
      INSERT OR IGNORE INTO message_hashes (hash, message_id, created_at)
      VALUES (@hash, @message_id, @created_at)
    `);

    const transaction = this.db.transaction(() => {
      let totalTokens = 0;
      const firstMessage = messages[0];
      const lastMessage = messages[messages.length - 1];

      // Insert conversation FIRST (before messages, for foreign key constraint)
      upsertConversation.run({
        id: sessionId,
        session_id: sessionId,
        project_path: metadata.cwd || null,
        created_at: firstMessage.timestamp,
        updated_at: lastMessage.timestamp,
        message_count: 0,
        total_tokens: 0,
        metadata: JSON.stringify({ ...metadata, source: 'session_file' })
      });

      for (const msg of messages) {
        if (!msg.message) continue;

        try {
          const messageId = msg.uuid || `${sessionId}-${msg.timestamp}`;
          const content = this.extractTextContent(msg.message.content);
          const hash = this.generateHash(sessionId, msg.timestamp, content);

          // Check if hash exists
          const existing = this.db.prepare('SELECT message_id FROM message_hashes WHERE hash = ?').get(hash);
          if (existing) {
            result.skipped++;
            continue;
          }

          // Calculate tokens
          const inputTokens = msg.message.usage?.input_tokens || 0;
          const outputTokens = msg.message.usage?.output_tokens || 0;
          totalTokens += inputTokens + outputTokens;

          // Insert message
          upsertMessage.run({
            id: messageId,
            conversation_id: sessionId,
            parent_id: msg.parentUuid || null,
            role: msg.message.role,
            content,
            timestamp: msg.timestamp,
            model: msg.message.model || null,
            input_tokens: inputTokens,
            output_tokens: outputTokens,
            cwd: msg.cwd || metadata.cwd || null,
            git_branch: msg.gitBranch || metadata.gitBranch || null
          });

          // Extract and insert tool calls
          const toolCalls = this.extractToolCalls(msg.message.content);
          for (const tool of toolCalls) {
            insertToolCall.run({
              id: tool.id || `${messageId}-${tool.name}`,
              message_id: messageId,
              tool_name: tool.name,
              tool_input: JSON.stringify(tool.input),
              tool_result: tool.result || null,
              timestamp: msg.timestamp
            });
          }

          // Insert hash
          insertHash.run({
            hash,
            message_id: messageId,
            created_at: msg.timestamp
          });

          result.imported++;
        } catch (err) {
          result.errors++;
          console.error(`Failed to insert message: ${(err as Error).message}`);
        }
      }

      // Update conversation with final counts
      upsertConversation.run({
        id: sessionId,
        session_id: sessionId,
        project_path: metadata.cwd || null,
        created_at: firstMessage.timestamp,
        updated_at: lastMessage.timestamp,
        message_count: result.imported,
        total_tokens: totalTokens,
        metadata: JSON.stringify({ ...metadata, source: 'session_file' })
      });
    });

    transaction();
    return result;
  }

  /**
   * Extract text content from message content
   */
  private extractTextContent(content: string | ContentBlock[]): string {
    if (typeof content === 'string') {
      return content;
    }

    return content
      .filter(block => block.type === 'text' || block.type === 'thinking')
      .map(block => block.text || block.thinking || '')
      .join('\n\n');
  }

  /**
   * Extract tool calls from content blocks
   */
  private extractToolCalls(content: string | ContentBlock[]): Array<{
    id?: string;
    name: string;
    input?: object;
    result?: string;
  }> {
    if (typeof content === 'string') {
      return [];
    }

    const toolCalls: Array<{ id?: string; name: string; input?: object; result?: string }> = [];
    const toolResultMap = new Map<string, string>();

    // First pass: collect tool results
    for (const block of content) {
      if (block.type === 'tool_result' && block.id) {
        toolResultMap.set(block.id, block.content || '');
      }
    }

    // Second pass: collect tool uses with their results
    for (const block of content) {
      if (block.type === 'tool_use' && block.name) {
        toolCalls.push({
          id: block.id,
          name: block.name,
          input: block.input,
          result: block.id ? toolResultMap.get(block.id) : undefined
        });
      }
    }

    return toolCalls;
  }

  /**
   * Generate SHA256 hash for deduplication
   */
  private generateHash(sessionId: string, timestamp: string, content: string): string {
    const data = `${sessionId}:${timestamp}:${content}`;
    return createHash('sha256').update(data).digest('hex');
  }

  /**
   * Get last import time for a source
   */
  private getLastImportTime(source: string): string | null {
    const result = this.db.prepare('SELECT value FROM import_metadata WHERE key = ?').get(source) as any;
    return result?.value || null;
  }

  /**
   * Update last import time
   */
  private updateLastImportTime(source: string): void {
    const now = new Date().toISOString();
    this.db.prepare(`
      INSERT INTO import_metadata (key, value, updated_at)
      VALUES (@key, @value, @updated_at)
      ON CONFLICT(key) DO UPDATE SET value = @value, updated_at = @updated_at
    `).run({
      key: source,
      value: now,
      updated_at: now
    });
  }

  /**
   * Close database connection
   */
  close(): void {
    this.db.close();
  }
}
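For reference, each line of the global history.jsonl is expected to match the GlobalHistoryEntry interface above, and deduplication keys off a SHA-256 of the session id, ISO timestamp, and display text. A minimal sketch, with illustrative (not real) field values:

import { createHash } from 'crypto';

// Illustrative entry, shaped like GlobalHistoryEntry
const entry = {
  display: 'Refactor the session scanner',
  pastedContents: {},
  timestamp: 1739233200000, // epoch milliseconds
  project: 'D--Claude-dms3',
  sessionId: 'abc123'
};

// Same key construction as HistoryImporter.generateHash():
// sha256("<sessionId>:<ISO timestamp>:<display>")
const iso = new Date(entry.timestamp).toISOString();
const dedupKey = createHash('sha256')
  .update(`${entry.sessionId}:${iso}:${entry.display}`)
  .digest('hex');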
ccw/src/core/memory-store.ts (new file, 702 lines)
@@ -0,0 +1,702 @@
/**
 * Memory Store - SQLite Storage Backend
 * Provides persistent storage for memory module with entity tracking, associations, and conversation history
 */

import Database from 'better-sqlite3';
import { existsSync, mkdirSync } from 'fs';
import { join } from 'path';

// Types
export interface Entity {
  id?: number;
  type: 'file' | 'module' | 'topic' | 'url';
  value: string;
  normalized_value: string;
  first_seen_at: string;
  last_seen_at: string;
  metadata?: string;
}

export interface AccessLog {
  id?: number;
  entity_id: number;
  action: 'read' | 'write' | 'mention';
  session_id?: string;
  timestamp: string;
  context_summary?: string;
}

export interface EntityStats {
  entity_id: number;
  read_count: number;
  write_count: number;
  mention_count: number;
  heat_score: number;
}

export interface Association {
  source_id: number;
  target_id: number;
  weight: number;
  last_interaction_at?: string;
}

export interface PromptHistory {
  id?: number;
  session_id: string;
  project_path?: string;
  prompt_text?: string;
  context_summary?: string;
  timestamp: number;
  hash?: string;
  quality_score?: number;
  intent_label?: string;
}

export interface PromptPattern {
  id?: number;
  pattern_type?: string;
  frequency: number;
  example_ids?: string;
  last_detected?: number;
}

export interface Conversation {
  id: string;
  source?: string;
  external_id?: string;
  project_name?: string;
  git_branch?: string;
  created_at: string;
  updated_at: string;
  quality_score?: number;
  turn_count: number;
  prompt_preview?: string;
}

export interface Message {
  id?: number;
  conversation_id: string;
  role: 'user' | 'assistant' | 'system';
  content_text?: string;
  content_json?: string;
  timestamp: string;
  token_count?: number;
}

export interface ToolCall {
  id?: number;
  message_id: number;
  tool_name: string;
  tool_args?: string;
  tool_output?: string;
  status?: string;
  duration_ms?: number;
}

export interface HotEntity extends Entity {
  stats: EntityStats;
}

export interface EntityWithAssociations extends Entity {
  associations: Array<{
    target: Entity;
    weight: number;
    last_interaction_at?: string;
  }>;
}

/**
 * Memory Store using SQLite
 */
export class MemoryStore {
  private db: Database.Database;
  private dbPath: string;

  constructor(projectPath: string) {
    const memoryDir = join(projectPath, '.workflow', '.memory');
    if (!existsSync(memoryDir)) {
      mkdirSync(memoryDir, { recursive: true });
    }

    this.dbPath = join(memoryDir, 'memory.db');
    this.db = new Database(this.dbPath);
    this.db.pragma('journal_mode = WAL');
    this.db.pragma('synchronous = NORMAL');

    this.initDatabase();
  }

  /**
   * Initialize database schema
   */
  private initDatabase(): void {
    this.db.exec(`
      -- Entity table
      CREATE TABLE IF NOT EXISTS entities (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        type TEXT NOT NULL,
        value TEXT NOT NULL,
        normalized_value TEXT NOT NULL,
        first_seen_at TEXT NOT NULL,
        last_seen_at TEXT NOT NULL,
        metadata TEXT,
        UNIQUE(type, normalized_value)
      );

      -- Access logs table
      CREATE TABLE IF NOT EXISTS access_logs (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        entity_id INTEGER NOT NULL,
        action TEXT NOT NULL,
        session_id TEXT,
        timestamp TEXT NOT NULL,
        context_summary TEXT,
        FOREIGN KEY (entity_id) REFERENCES entities(id) ON DELETE CASCADE
      );

      -- Entity statistics table
      CREATE TABLE IF NOT EXISTS entity_stats (
        entity_id INTEGER PRIMARY KEY,
        read_count INTEGER DEFAULT 0,
        write_count INTEGER DEFAULT 0,
        mention_count INTEGER DEFAULT 0,
        heat_score REAL DEFAULT 0,
        FOREIGN KEY (entity_id) REFERENCES entities(id) ON DELETE CASCADE
      );

      -- Associations table
      CREATE TABLE IF NOT EXISTS associations (
        source_id INTEGER NOT NULL,
        target_id INTEGER NOT NULL,
        weight INTEGER DEFAULT 0,
        last_interaction_at TEXT,
        PRIMARY KEY (source_id, target_id),
        FOREIGN KEY (source_id) REFERENCES entities(id) ON DELETE CASCADE,
        FOREIGN KEY (target_id) REFERENCES entities(id) ON DELETE CASCADE
      );

      -- Prompt history table
      CREATE TABLE IF NOT EXISTS prompt_history (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        session_id TEXT NOT NULL,
        project_path TEXT,
        prompt_text TEXT,
        context_summary TEXT,
        timestamp INTEGER,
        hash TEXT UNIQUE,
        quality_score INTEGER,
        intent_label TEXT
      );

      -- Prompt patterns table
      CREATE TABLE IF NOT EXISTS prompt_patterns (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        pattern_type TEXT,
        frequency INTEGER,
        example_ids TEXT,
        last_detected INTEGER
      );

      -- Conversations table
      CREATE TABLE IF NOT EXISTS conversations (
        id TEXT PRIMARY KEY,
        source TEXT DEFAULT 'ccw',
        external_id TEXT,
        project_name TEXT,
        git_branch TEXT,
        created_at TEXT,
        updated_at TEXT,
        quality_score INTEGER,
        turn_count INTEGER,
        prompt_preview TEXT
      );

      -- Messages table
      CREATE TABLE IF NOT EXISTS messages (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        conversation_id TEXT NOT NULL,
        role TEXT NOT NULL,
        content_text TEXT,
        content_json TEXT,
        timestamp TEXT NOT NULL,
        token_count INTEGER,
        FOREIGN KEY (conversation_id) REFERENCES conversations(id) ON DELETE CASCADE
      );

      -- Tool calls table
      CREATE TABLE IF NOT EXISTS tool_calls (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        message_id INTEGER NOT NULL,
        tool_name TEXT NOT NULL,
        tool_args TEXT,
        tool_output TEXT,
        status TEXT,
        duration_ms INTEGER,
        FOREIGN KEY (message_id) REFERENCES messages(id) ON DELETE CASCADE
      );

      -- Indexes for efficient queries
      CREATE INDEX IF NOT EXISTS idx_entities_type ON entities(type);
      CREATE INDEX IF NOT EXISTS idx_entities_normalized ON entities(normalized_value);
      CREATE INDEX IF NOT EXISTS idx_entities_last_seen ON entities(last_seen_at DESC);
      CREATE INDEX IF NOT EXISTS idx_access_logs_entity ON access_logs(entity_id);
      CREATE INDEX IF NOT EXISTS idx_access_logs_timestamp ON access_logs(timestamp DESC);
      CREATE INDEX IF NOT EXISTS idx_access_logs_session ON access_logs(session_id);
      CREATE INDEX IF NOT EXISTS idx_entity_stats_heat ON entity_stats(heat_score DESC);
      CREATE INDEX IF NOT EXISTS idx_associations_source ON associations(source_id);
      CREATE INDEX IF NOT EXISTS idx_associations_target ON associations(target_id);
      CREATE INDEX IF NOT EXISTS idx_prompt_history_session ON prompt_history(session_id);
      CREATE INDEX IF NOT EXISTS idx_prompt_history_timestamp ON prompt_history(timestamp DESC);
      CREATE INDEX IF NOT EXISTS idx_conversations_created ON conversations(created_at DESC);
      CREATE INDEX IF NOT EXISTS idx_conversations_updated ON conversations(updated_at DESC);
      CREATE INDEX IF NOT EXISTS idx_messages_conversation ON messages(conversation_id);
      CREATE INDEX IF NOT EXISTS idx_tool_calls_message ON tool_calls(message_id);

      -- Full-text search for prompt history
      CREATE VIRTUAL TABLE IF NOT EXISTS prompt_history_fts USING fts5(
        prompt_text,
        context_summary,
        content='prompt_history',
        content_rowid='id'
      );

      -- Triggers to keep FTS index updated
      CREATE TRIGGER IF NOT EXISTS prompt_history_ai AFTER INSERT ON prompt_history BEGIN
        INSERT INTO prompt_history_fts(rowid, prompt_text, context_summary)
        VALUES (new.id, new.prompt_text, new.context_summary);
      END;

      CREATE TRIGGER IF NOT EXISTS prompt_history_ad AFTER DELETE ON prompt_history BEGIN
        INSERT INTO prompt_history_fts(prompt_history_fts, rowid, prompt_text, context_summary)
        VALUES('delete', old.id, old.prompt_text, old.context_summary);
      END;

      CREATE TRIGGER IF NOT EXISTS prompt_history_au AFTER UPDATE ON prompt_history BEGIN
        INSERT INTO prompt_history_fts(prompt_history_fts, rowid, prompt_text, context_summary)
        VALUES('delete', old.id, old.prompt_text, old.context_summary);
        INSERT INTO prompt_history_fts(rowid, prompt_text, context_summary)
        VALUES (new.id, new.prompt_text, new.context_summary);
      END;
    `);
  }

  /**
   * Upsert an entity
   */
  upsertEntity(entity: Entity): number {
    const stmt = this.db.prepare(`
      INSERT INTO entities (type, value, normalized_value, first_seen_at, last_seen_at, metadata)
      VALUES (@type, @value, @normalized_value, @first_seen_at, @last_seen_at, @metadata)
      ON CONFLICT(type, normalized_value) DO UPDATE SET
        value = @value,
        last_seen_at = @last_seen_at,
        metadata = @metadata
      RETURNING id
    `);

    const result = stmt.get({
      type: entity.type,
      value: entity.value,
      normalized_value: entity.normalized_value,
      first_seen_at: entity.first_seen_at,
      last_seen_at: entity.last_seen_at,
      metadata: entity.metadata || null
    }) as { id: number };

    return result.id;
  }

  /**
   * Get entity by type and normalized value
   */
  getEntity(type: string, normalizedValue: string): Entity | null {
    const stmt = this.db.prepare(`
      SELECT * FROM entities WHERE type = ? AND normalized_value = ?
    `);
    return stmt.get(type, normalizedValue) as Entity | null;
  }

  /**
   * Get entity by ID
   */
  getEntityById(id: number): Entity | null {
    const stmt = this.db.prepare(`SELECT * FROM entities WHERE id = ?`);
    return stmt.get(id) as Entity | null;
  }

  /**
   * Get hot entities (by heat score)
   */
  getHotEntities(limit: number = 20): HotEntity[] {
    const stmt = this.db.prepare(`
      SELECT e.*, s.read_count, s.write_count, s.mention_count, s.heat_score
      FROM entities e
      INNER JOIN entity_stats s ON e.id = s.entity_id
      ORDER BY s.heat_score DESC
      LIMIT ?
    `);

    const rows = stmt.all(limit) as any[];
    return rows.map(row => ({
      id: row.id,
      type: row.type,
      value: row.value,
      normalized_value: row.normalized_value,
      first_seen_at: row.first_seen_at,
      last_seen_at: row.last_seen_at,
      metadata: row.metadata,
      stats: {
        entity_id: row.id,
        read_count: row.read_count,
        write_count: row.write_count,
        mention_count: row.mention_count,
        heat_score: row.heat_score
      }
    }));
  }

  /**
   * Log entity access
   */
  logAccess(log: AccessLog): void {
    const stmt = this.db.prepare(`
      INSERT INTO access_logs (entity_id, action, session_id, timestamp, context_summary)
      VALUES (@entity_id, @action, @session_id, @timestamp, @context_summary)
    `);

    stmt.run({
      entity_id: log.entity_id,
      action: log.action,
      session_id: log.session_id || null,
      timestamp: log.timestamp,
      context_summary: log.context_summary || null
    });
  }

  /**
   * Get recent access logs for an entity
   */
  getRecentAccess(entityId: number, limit: number = 50): AccessLog[] {
    const stmt = this.db.prepare(`
      SELECT * FROM access_logs
      WHERE entity_id = ?
      ORDER BY timestamp DESC
      LIMIT ?
    `);
    return stmt.all(entityId, limit) as AccessLog[];
  }

  /**
   * Update entity statistics
   */
  updateStats(entityId: number, action: 'read' | 'write' | 'mention'): void {
    const upsertStmt = this.db.prepare(`
      INSERT INTO entity_stats (entity_id, read_count, write_count, mention_count, heat_score)
      VALUES (@entity_id, 0, 0, 0, 0)
      ON CONFLICT(entity_id) DO NOTHING
    `);

    upsertStmt.run({ entity_id: entityId });

    const field = `${action}_count`;
    const updateStmt = this.db.prepare(`
      UPDATE entity_stats
      SET ${field} = ${field} + 1
      WHERE entity_id = ?
    `);

    updateStmt.run(entityId);
  }

  /**
   * Get entity statistics
   */
  getStats(entityId: number): EntityStats | null {
    const stmt = this.db.prepare(`SELECT * FROM entity_stats WHERE entity_id = ?`);
    return stmt.get(entityId) as EntityStats | null;
  }

  /**
   * Calculate and update heat score for an entity
   */
  calculateHeatScore(entityId: number): number {
    const stats = this.getStats(entityId);
    if (!stats) return 0;

    const now = Date.now();
    const logs = this.getRecentAccess(entityId, 100);

    let recencyScore = 0;
    for (const log of logs) {
      const ageMs = now - new Date(log.timestamp).getTime();
      const ageDays = ageMs / (1000 * 60 * 60 * 24);
      const decay = Math.exp(-ageDays / 7); // 7-day half-life
      recencyScore += decay;
    }

    const heatScore = (
      stats.read_count * 1 +
      stats.write_count * 3 +
      stats.mention_count * 2 +
      recencyScore * 5
    );

    const updateStmt = this.db.prepare(`
      UPDATE entity_stats SET heat_score = ? WHERE entity_id = ?
    `);
    updateStmt.run(heatScore, entityId);

    return heatScore;
  }

  /**
   * Record association between entities
   */
  recordAssociation(sourceId: number, targetId: number, timestamp?: string): void {
    const stmt = this.db.prepare(`
      INSERT INTO associations (source_id, target_id, weight, last_interaction_at)
      VALUES (@source_id, @target_id, 1, @last_interaction_at)
      ON CONFLICT(source_id, target_id) DO UPDATE SET
        weight = weight + 1,
        last_interaction_at = @last_interaction_at
    `);

    stmt.run({
      source_id: sourceId,
      target_id: targetId,
      last_interaction_at: timestamp || new Date().toISOString()
    });
  }

  /**
   * Get associations for an entity
   */
  getAssociations(entityId: number, limit: number = 20): EntityWithAssociations['associations'] {
    const stmt = this.db.prepare(`
      SELECT e.*, a.weight, a.last_interaction_at
      FROM associations a
      INNER JOIN entities e ON a.target_id = e.id
      WHERE a.source_id = ?
      ORDER BY a.weight DESC
      LIMIT ?
    `);

    const rows = stmt.all(entityId, limit) as any[];
    return rows.map(row => ({
      target: {
        id: row.id,
        type: row.type,
        value: row.value,
        normalized_value: row.normalized_value,
        first_seen_at: row.first_seen_at,
        last_seen_at: row.last_seen_at,
        metadata: row.metadata
      },
      weight: row.weight,
      last_interaction_at: row.last_interaction_at
    }));
  }

  /**
   * Save prompt to history
   */
  savePrompt(prompt: PromptHistory): number {
    const stmt = this.db.prepare(`
      INSERT INTO prompt_history (session_id, project_path, prompt_text, context_summary, timestamp, hash, quality_score, intent_label)
      VALUES (@session_id, @project_path, @prompt_text, @context_summary, @timestamp, @hash, @quality_score, @intent_label)
      ON CONFLICT(hash) DO UPDATE SET
        quality_score = @quality_score,
        intent_label = @intent_label
      RETURNING id
    `);

    const result = stmt.get({
      session_id: prompt.session_id,
      project_path: prompt.project_path || null,
      prompt_text: prompt.prompt_text || null,
      context_summary: prompt.context_summary || null,
      timestamp: prompt.timestamp,
      hash: prompt.hash || null,
      quality_score: prompt.quality_score || null,
      intent_label: prompt.intent_label || null
    }) as { id: number };

    return result.id;
  }

  /**
   * Get prompt history for a session
   */
  getPromptHistory(sessionId: string, limit: number = 50): PromptHistory[] {
    const stmt = this.db.prepare(`
      SELECT * FROM prompt_history
      WHERE session_id = ?
      ORDER BY timestamp DESC
      LIMIT ?
    `);
    return stmt.all(sessionId, limit) as PromptHistory[];
  }

  /**
   * Search prompts by text
   */
  searchPrompts(query: string, limit: number = 20): PromptHistory[] {
    const stmt = this.db.prepare(`
      SELECT ph.* FROM prompt_history ph
      INNER JOIN prompt_history_fts fts ON fts.rowid = ph.id
      WHERE prompt_history_fts MATCH ?
      ORDER BY ph.timestamp DESC
      LIMIT ?
    `);
    return stmt.all(query, limit) as PromptHistory[];
  }

  /**
   * Save or update a conversation
   */
  saveConversation(conversation: Conversation): void {
    const stmt = this.db.prepare(`
      INSERT INTO conversations (id, source, external_id, project_name, git_branch, created_at, updated_at, quality_score, turn_count, prompt_preview)
      VALUES (@id, @source, @external_id, @project_name, @git_branch, @created_at, @updated_at, @quality_score, @turn_count, @prompt_preview)
      ON CONFLICT(id) DO UPDATE SET
        updated_at = @updated_at,
        quality_score = @quality_score,
        turn_count = @turn_count,
        prompt_preview = @prompt_preview
    `);

    stmt.run({
      id: conversation.id,
      source: conversation.source || 'ccw',
      external_id: conversation.external_id || null,
      project_name: conversation.project_name || null,
      git_branch: conversation.git_branch || null,
      created_at: conversation.created_at,
      updated_at: conversation.updated_at,
      quality_score: conversation.quality_score || null,
      turn_count: conversation.turn_count,
      prompt_preview: conversation.prompt_preview || null
    });
  }

  /**
   * Get conversations
   */
  getConversations(limit: number = 50, offset: number = 0): Conversation[] {
    const stmt = this.db.prepare(`
      SELECT * FROM conversations
      ORDER BY updated_at DESC
      LIMIT ? OFFSET ?
    `);
    return stmt.all(limit, offset) as Conversation[];
  }

  /**
   * Get conversation by ID
   */
  getConversation(id: string): Conversation | null {
    const stmt = this.db.prepare(`SELECT * FROM conversations WHERE id = ?`);
    return stmt.get(id) as Conversation | null;
  }

  /**
   * Save message
   */
  saveMessage(message: Message): number {
    const stmt = this.db.prepare(`
      INSERT INTO messages (conversation_id, role, content_text, content_json, timestamp, token_count)
      VALUES (@conversation_id, @role, @content_text, @content_json, @timestamp, @token_count)
      RETURNING id
    `);

    const result = stmt.get({
      conversation_id: message.conversation_id,
      role: message.role,
      content_text: message.content_text || null,
      content_json: message.content_json || null,
      timestamp: message.timestamp,
      token_count: message.token_count || null
    }) as { id: number };

    return result.id;
  }

  /**
   * Get messages for a conversation
   */
  getMessages(conversationId: string): Message[] {
    const stmt = this.db.prepare(`
      SELECT * FROM messages
      WHERE conversation_id = ?
      ORDER BY timestamp ASC
    `);
    return stmt.all(conversationId) as Message[];
  }

  /**
   * Save tool call
   */
  saveToolCall(toolCall: ToolCall): number {
    const stmt = this.db.prepare(`
      INSERT INTO tool_calls (message_id, tool_name, tool_args, tool_output, status, duration_ms)
      VALUES (@message_id, @tool_name, @tool_args, @tool_output, @status, @duration_ms)
      RETURNING id
    `);

    const result = stmt.get({
      message_id: toolCall.message_id,
      tool_name: toolCall.tool_name,
      tool_args: toolCall.tool_args || null,
      tool_output: toolCall.tool_output || null,
      status: toolCall.status || null,
      duration_ms: toolCall.duration_ms || null
    }) as { id: number };

    return result.id;
  }

  /**
   * Get tool calls for a message
   */
  getToolCalls(messageId: number): ToolCall[] {
    const stmt = this.db.prepare(`
      SELECT * FROM tool_calls
      WHERE message_id = ?
    `);
    return stmt.all(messageId) as ToolCall[];
  }

  /**
   * Close database connection
   */
  close(): void {
    this.db.close();
  }
}

// Singleton instance cache
const storeCache = new Map<string, MemoryStore>();

/**
 * Get or create a store instance for a project
 */
export function getMemoryStore(projectPath: string): MemoryStore {
  if (!storeCache.has(projectPath)) {
    storeCache.set(projectPath, new MemoryStore(projectPath));
  }
  return storeCache.get(projectPath)!;
}

/**
 * Close all store instances
 */
export function closeAllStores(): void {
  for (const store of storeCache.values()) {
    store.close();
  }
  storeCache.clear();
}

export default MemoryStore;
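A minimal sketch of the intended tracking flow, using only the methods defined above (the project path and entity values are placeholders):

import { getMemoryStore } from './memory-store.js';

const store = getMemoryStore('/path/to/project'); // placeholder path
const now = new Date().toISOString();

// Upsert an entity, log one access, bump its counters, then refresh the heat score.
const entityId = store.upsertEntity({
  type: 'file',
  value: 'src/core/history-importer.ts',
  normalized_value: 'src/core/history-importer.ts',
  first_seen_at: now,
  last_seen_at: now
});
store.logAccess({ entity_id: entityId, action: 'read', timestamp: now });
store.updateStats(entityId, 'read');

// heat = reads*1 + writes*3 + mentions*2 + 5 * sum(exp(-ageDays / 7)) over recent accesses
const heat = store.calculateHeatScore(entityId);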
@@ -8,11 +8,12 @@ import { createHash } from 'crypto';
import { scanSessions } from './session-scanner.js';
import { aggregateData } from './data-aggregator.js';
import { resolvePath, getRecentPaths, trackRecentPath, removeRecentPath, normalizePathForDisplay, getWorkflowDir } from '../utils/path-resolver.js';
import { getCliToolsStatus, getExecutionHistory, getExecutionHistoryAsync, getExecutionDetail, getConversationDetail, deleteExecution, deleteExecutionAsync, batchDeleteExecutionsAsync, executeCliTool } from '../tools/cli-executor.js';
import { getCliToolsStatus, getExecutionHistory, getExecutionHistoryAsync, getExecutionDetail, getConversationDetail, deleteExecution, deleteExecutionAsync, batchDeleteExecutionsAsync, executeCliTool, getNativeSessionContent, getFormattedNativeConversation, getEnrichedConversation, getHistoryWithNativeInfo } from '../tools/cli-executor.js';
import { getAllManifests } from './manifest.js';
import { checkVenvStatus, bootstrapVenv, executeCodexLens, checkSemanticStatus, installSemantic } from '../tools/codex-lens.js';
import { generateSmartContext, formatSmartContext } from '../tools/smart-context.js';
import { listTools } from '../tools/index.js';
import { getMemoryStore } from './memory-store.js';
import type { ServerConfig } from '../types/config.js';interface ServerOptions { port?: number; initialPath?: string; host?: string; open?: boolean;}interface PostResult { error?: string; status?: number; [key: string]: unknown;}type PostHandler = (body: unknown) => Promise<PostResult>;

// Claude config file paths
@@ -54,7 +55,9 @@ const MODULE_CSS_FILES = [
  '07-managers.css',
  '08-review.css',
  '09-explorer.css',
  '10-cli.css'
  '10-cli.css',
  '11-memory.css',
  '11-prompt-history.css'
];

/**
@@ -121,6 +124,8 @@ const MODULE_FILES = [
  'views/cli-manager.js',
  'views/history.js',
  'views/explorer.js',
  'views/memory.js',
  'views/prompt-history.js',
  'main.js'
];
/**
@@ -643,11 +648,12 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
      const limit = parseInt(url.searchParams.get('limit') || '50', 10);
      const tool = url.searchParams.get('tool') || null;
      const status = url.searchParams.get('status') || null;
      const category = url.searchParams.get('category') as 'user' | 'internal' | 'insight' | null;
      const search = url.searchParams.get('search') || null;
      const recursive = url.searchParams.get('recursive') !== 'false'; // Default true

      // Use async version to ensure SQLite is initialized
      getExecutionHistoryAsync(projectPath, { limit, tool, status, search, recursive })
      getExecutionHistoryAsync(projectPath, { limit, tool, status, category, search, recursive })
        .then(history => {
          res.writeHead(200, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify(history));
@@ -718,6 +724,100 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
      return;
    }

    // API: Get Native Session Content (full conversation from native session file)
    if (pathname === '/api/cli/native-session') {
      const projectPath = url.searchParams.get('path') || initialPath;
      const executionId = url.searchParams.get('id');
      const format = url.searchParams.get('format') || 'json'; // json, text, pairs

      if (!executionId) {
        res.writeHead(400, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: 'Execution ID is required' }));
        return;
      }

      try {
        let result;
        if (format === 'text') {
          // Get formatted text representation
          result = await getFormattedNativeConversation(projectPath, executionId, {
            includeThoughts: url.searchParams.get('thoughts') === 'true',
            includeToolCalls: url.searchParams.get('tools') === 'true',
            includeTokens: url.searchParams.get('tokens') === 'true'
          });
        } else if (format === 'pairs') {
          // Get simple prompt/response pairs
          const enriched = await getEnrichedConversation(projectPath, executionId);
          result = enriched?.merged || null;
        } else {
          // Get full parsed session data
          result = await getNativeSessionContent(projectPath, executionId);
        }

        if (!result) {
          res.writeHead(404, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify({ error: 'Native session not found' }));
          return;
        }

        res.writeHead(200, { 'Content-Type': format === 'text' ? 'text/plain' : 'application/json' });
        res.end(format === 'text' ? result : JSON.stringify(result));
      } catch (err) {
        res.writeHead(500, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: (err as Error).message }));
      }
      return;
    }

    // API: Get Enriched Conversation (CCW + Native merged)
    if (pathname === '/api/cli/enriched') {
      const projectPath = url.searchParams.get('path') || initialPath;
      const executionId = url.searchParams.get('id');

      if (!executionId) {
        res.writeHead(400, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify({ error: 'Execution ID is required' }));
        return;
      }

      getEnrichedConversation(projectPath, executionId)
        .then(result => {
          if (!result) {
            res.writeHead(404, { 'Content-Type': 'application/json' });
            res.end(JSON.stringify({ error: 'Conversation not found' }));
            return;
          }
          res.writeHead(200, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify(result));
        })
        .catch(err => {
          res.writeHead(500, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify({ error: (err as Error).message }));
        });
      return;
    }

    // API: Get History with Native Session Info
    if (pathname === '/api/cli/history-native') {
      const projectPath = url.searchParams.get('path') || initialPath;
      const limit = parseInt(url.searchParams.get('limit') || '50', 10);
      const tool = url.searchParams.get('tool') || null;
      const status = url.searchParams.get('status') || null;
      const category = url.searchParams.get('category') as 'user' | 'internal' | 'insight' | null;
      const search = url.searchParams.get('search') || null;

      getHistoryWithNativeInfo(projectPath, { limit, tool, status, category, search })
        .then(history => {
          res.writeHead(200, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify(history));
        })
        .catch(err => {
          res.writeHead(500, { 'Content-Type': 'application/json' });
          res.end(JSON.stringify({ error: (err as Error).message }));
        });
      return;
    }

    // API: Execute CLI Tool
    if (pathname === '/api/cli/execute' && req.method === 'POST') {
      handlePostRequest(req, res, async (body) => {
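A short client-side sketch of how the routes added in the hunk above could be called; the query parameters match the handlers shown, while the host/port and execution id are placeholders:

// Full parsed session data (default JSON format)
const session = await fetch(
  'http://localhost:3000/api/cli/native-session?id=<execution-id>&path=/path/to/project'
).then(r => r.json());

// Plain-text rendering, including thoughts and tool calls
const text = await fetch(
  'http://localhost:3000/api/cli/native-session?id=<execution-id>&format=text&thoughts=true&tools=true'
).then(r => r.text());

// CCW history merged with the native session
const enriched = await fetch(
  'http://localhost:3000/api/cli/enriched?id=<execution-id>'
).then(r => r.json());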
@@ -817,6 +917,534 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
|
||||
return;
|
||||
}
|
||||
|
||||
// API: Memory Module - Get hotspot statistics
|
||||
if (pathname === '/api/memory/stats') {
|
||||
const projectPath = url.searchParams.get('path') || initialPath;
|
||||
const limit = parseInt(url.searchParams.get('limit') || '20', 10);
|
||||
const type = url.searchParams.get('type') || null;
|
||||
const sort = url.searchParams.get('sort') || 'heat';
|
||||
|
||||
try {
|
||||
const memoryStore = getMemoryStore(projectPath);
|
||||
let hotEntities = memoryStore.getHotEntities(limit);
|
||||
|
||||
// Filter by type if specified
|
||||
if (type) {
|
||||
hotEntities = hotEntities.filter(e => e.type === type);
|
||||
}
|
||||
|
||||
// Sort by field
|
||||
if (sort === 'reads') {
|
||||
hotEntities.sort((a, b) => b.stats.read_count - a.stats.read_count);
|
||||
} else if (sort === 'writes') {
|
||||
hotEntities.sort((a, b) => b.stats.write_count - a.stats.write_count);
|
||||
}
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
items: hotEntities.map(e => ({
|
||||
value: e.value,
|
||||
type: e.type,
|
||||
read_count: e.stats.read_count,
|
||||
write_count: e.stats.write_count,
|
||||
mention_count: e.stats.mention_count,
|
||||
heat_score: e.stats.heat_score
|
||||
}))
|
||||
}));
|
||||
} catch (error: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: (error as Error).message }));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// API: Memory Module - Get association graph
|
||||
if (pathname === '/api/memory/graph') {
|
||||
const projectPath = url.searchParams.get('path') || initialPath;
|
||||
const center = url.searchParams.get('center');
|
||||
const depth = parseInt(url.searchParams.get('depth') || '1', 10);
|
||||
|
||||
if (!center) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'center parameter is required' }));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const memoryStore = getMemoryStore(projectPath);
|
||||
|
||||
// Find the center entity (assume it's a file for now)
|
||||
const entity = memoryStore.getEntity('file', center);
|
||||
if (!entity) {
|
||||
res.writeHead(404, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: 'Entity not found' }));
|
||||
return;
|
||||
}
|
||||
|
||||
// Get associations
|
||||
const associations = memoryStore.getAssociations(entity.id!, 20);
|
||||
const stats = memoryStore.getStats(entity.id!);
|
||||
|
||||
// Build graph structure
|
||||
const nodes = [
|
||||
{
|
||||
id: entity.id!.toString(),
|
||||
label: entity.value,
|
||||
type: entity.type,
|
||||
heat: stats?.heat_score || 0
|
||||
}
|
||||
];
|
||||
|
||||
const links = [];
|
||||
for (const assoc of associations) {
|
||||
nodes.push({
|
||||
id: assoc.target.id!.toString(),
|
||||
label: assoc.target.value,
|
||||
type: assoc.target.type,
|
||||
heat: 0
|
||||
});
|
||||
|
||||
links.push({
|
||||
source: entity.id!.toString(),
|
||||
target: assoc.target.id!.toString(),
|
||||
weight: assoc.weight
|
||||
});
|
||||
}
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ nodes, links }));
|
||||
} catch (error: unknown) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ error: (error as Error).message }));
|
||||
}
|
||||
return;
|
||||
}

// API: Memory Module - Track entity access
if (pathname === '/api/memory/track' && req.method === 'POST') {
  handlePostRequest(req, res, async (body) => {
    const { type, action, value, sessionId, metadata, path: projectPath } = body;

    if (!type || !action || !value) {
      return { error: 'type, action, and value are required', status: 400 };
    }

    const basePath = projectPath || initialPath;

    try {
      const memoryStore = getMemoryStore(basePath);
      const now = new Date().toISOString();

      // Normalize the value
      const normalizedValue = value.toLowerCase().trim();

      // Upsert entity
      const entityId = memoryStore.upsertEntity({
        type,
        value,
        normalized_value: normalizedValue,
        first_seen_at: now,
        last_seen_at: now,
        metadata: metadata ? JSON.stringify(metadata) : undefined
      });

      // Log access
      memoryStore.logAccess({
        entity_id: entityId,
        action,
        session_id: sessionId,
        timestamp: now,
        context_summary: metadata?.context
      });

      // Update stats
      memoryStore.updateStats(entityId, action);

      // Calculate new heat score
      const heatScore = memoryStore.calculateHeatScore(entityId);
      const stats = memoryStore.getStats(entityId);

      // Broadcast MEMORY_UPDATED event via WebSocket
      broadcastToClients({
        type: 'MEMORY_UPDATED',
        payload: {
          entity: { id: entityId, type, value },
          stats: {
            read_count: stats?.read_count || 0,
            write_count: stats?.write_count || 0,
            mention_count: stats?.mention_count || 0,
            heat_score: heatScore
          },
          timestamp: now
        }
      });

      return {
        success: true,
        entity_id: entityId,
        heat_score: heatScore
      };
    } catch (error: unknown) {
      return { error: (error as Error).message, status: 500 };
    }
  });
  return;
}
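For reference, a client would hit this route roughly as follows. This is a minimal sketch, not taken from the diff; the host, port, and example field values are assumptions, and the exact set of accepted `action` values is defined by MemoryStore.

```typescript
// Hypothetical client call for /api/memory/track (illustrative only)
const response = await fetch('http://localhost:3000/api/memory/track', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    type: 'file',                                   // entity type
    action: 'read',                                 // e.g. read/write/mention, suggested by the stats fields above
    value: 'src/core/history-importer.ts',          // the entity being accessed
    sessionId: 'session-123',                       // optional
    metadata: { context: 'opened from explorer' }   // optional; context ends up in the access log
  })
});
const { success, entity_id, heat_score } = await response.json();
```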

// API: Memory Module - Get native Claude history from ~/.claude/history.jsonl
if (pathname === '/api/memory/native-history') {
  const projectPath = url.searchParams.get('path') || initialPath;
  const limit = parseInt(url.searchParams.get('limit') || '100', 10);
  const historyFile = join(homedir(), '.claude', 'history.jsonl');

  try {
    if (!existsSync(historyFile)) {
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ prompts: [], total: 0, message: 'No history file found' }));
      return;
    }

    const content = readFileSync(historyFile, 'utf8');
    const lines = content.trim().split('\n').filter(line => line.trim());
    const allPrompts = [];

    for (const line of lines) {
      try {
        const entry = JSON.parse(line);
        // Filter by project if specified
        if (projectPath && entry.project) {
          const normalizedProject = entry.project.replace(/\\/g, '/').toLowerCase();
          const normalizedPath = projectPath.replace(/\\/g, '/').toLowerCase();
          if (!normalizedProject.includes(normalizedPath) && !normalizedPath.includes(normalizedProject)) {
            continue;
          }
        }

        allPrompts.push({
          id: `${entry.sessionId}-${entry.timestamp}`,
          text: entry.display || '',
          timestamp: new Date(entry.timestamp).toISOString(),
          project: entry.project || '',
          session_id: entry.sessionId || '',
          pasted_contents: entry.pastedContents || {},
          // Derive intent from content keywords
          intent: derivePromptIntent(entry.display || ''),
          quality_score: calculateQualityScore(entry.display || '')
        });
      } catch (parseError) {
        // Skip malformed lines
      }
    }

    // Sort by timestamp descending
    allPrompts.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());

    // Apply limit
    const prompts = allPrompts.slice(0, limit);

    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ prompts, total: allPrompts.length }));
  } catch (error: unknown) {
    res.writeHead(500, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: (error as Error).message }));
  }
  return;
}
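For reference, the endpoint above can be queried like this; a sketch only, with a placeholder host/port and example `path`/`limit` values, reading the `{ prompts, total }` shape returned by the handler.

```typescript
// Hypothetical client call for /api/memory/native-history (illustrative only)
const params = new URLSearchParams({ path: 'D:/Claude/dms3', limit: '50' }); // example values
const res = await fetch(`http://localhost:3000/api/memory/native-history?${params}`);
const { prompts, total } = await res.json();
for (const p of prompts) {
  console.log(`${p.timestamp} [${p.intent}] (${p.quality_score}) ${p.text.slice(0, 60)}`);
}
console.log(`showing ${prompts.length} of ${total} prompts`);
```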

// API: Memory Module - Get prompt history
if (pathname === '/api/memory/prompts') {
  const projectPath = url.searchParams.get('path') || initialPath;
  const limit = parseInt(url.searchParams.get('limit') || '50', 10);
  const search = url.searchParams.get('search') || null;

  try {
    const memoryStore = getMemoryStore(projectPath);
    let prompts;

    if (search) {
      prompts = memoryStore.searchPrompts(search, limit);
    } else {
      // Get all recent prompts (we'll need to add this method to MemoryStore)
      const stmt = memoryStore['db'].prepare(`
        SELECT * FROM prompt_history
        ORDER BY timestamp DESC
        LIMIT ?
      `);
      prompts = stmt.all(limit);
    }

    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ prompts }));
  } catch (error: unknown) {
    res.writeHead(500, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: (error as Error).message }));
  }
  return;
}

// API: Memory Module - Get insights
if (pathname === '/api/memory/insights') {
  const projectPath = url.searchParams.get('path') || initialPath;

  try {
    const memoryStore = getMemoryStore(projectPath);

    // Get total prompt count
    const countStmt = memoryStore['db'].prepare(`SELECT COUNT(*) as count FROM prompt_history`);
    const { count: totalPrompts } = countStmt.get() as { count: number };

    // Get top intent
    const topIntentStmt = memoryStore['db'].prepare(`
      SELECT intent_label, COUNT(*) as count
      FROM prompt_history
      WHERE intent_label IS NOT NULL
      GROUP BY intent_label
      ORDER BY count DESC
      LIMIT 1
    `);
    const topIntentRow = topIntentStmt.get() as { intent_label: string; count: number } | undefined;

    // Get average prompt length
    const avgLengthStmt = memoryStore['db'].prepare(`
      SELECT AVG(LENGTH(prompt_text)) as avg_length
      FROM prompt_history
      WHERE prompt_text IS NOT NULL
    `);
    const { avg_length: avgLength } = avgLengthStmt.get() as { avg_length: number };

    // Get prompt patterns
    const patternsStmt = memoryStore['db'].prepare(`
      SELECT * FROM prompt_patterns
      ORDER BY frequency DESC
      LIMIT 10
    `);
    const patterns = patternsStmt.all();

    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({
      stats: {
        totalPrompts,
        topIntent: topIntentRow?.intent_label || 'unknown',
        avgLength: Math.round(avgLength || 0)
      },
      patterns: patterns.map((p: any) => ({
        type: p.pattern_type,
        description: `Pattern detected in prompts`,
        occurrences: p.frequency,
        suggestion: `Consider using more specific prompts for ${p.pattern_type}`
      }))
    }));
  } catch (error: unknown) {
    res.writeHead(500, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: (error as Error).message }));
  }
  return;
}

// API: Memory Module - Trigger async CLI-based insights analysis
if (pathname === '/api/memory/insights/analyze' && req.method === 'POST') {
  handlePostRequest(req, res, async (body: any) => {
    const projectPath = body.path || initialPath;
    const tool = body.tool || 'gemini'; // gemini, qwen, codex
    const prompts = body.prompts || [];
    const lang = body.lang || 'en'; // Language preference

    if (prompts.length === 0) {
      return { error: 'No prompts provided for analysis', status: 400 };
    }

    // Prepare prompt summary for CLI analysis
    const promptSummary = prompts.slice(0, 20).map((p: any, i: number) => {
      return `${i + 1}. [${p.intent || 'unknown'}] ${(p.text || '').substring(0, 100)}...`;
    }).join('\n');

    const langInstruction = lang === 'zh'
      ? '请用中文回复。所有 description、suggestion、title 字段必须使用中文。'
      : 'Respond in English. All description, suggestion, title fields must be in English.';

    const analysisPrompt = `
PURPOSE: Analyze prompt patterns and provide optimization suggestions
TASK:
• Review the following prompt history summary
• Identify common patterns (vague requests, repetitive queries, incomplete context)
• Suggest specific improvements for prompt quality
• Detect areas where prompts could be more effective
MODE: analysis
CONTEXT: ${prompts.length} prompts from project: ${projectPath}
EXPECTED: JSON with patterns array and suggestions array
LANGUAGE: ${langInstruction}

PROMPT HISTORY:
${promptSummary}

Return ONLY valid JSON in this exact format (no markdown, no code blocks, just pure JSON):
{
  "patterns": [
    {"type": "pattern_type", "description": "description", "occurrences": count, "severity": "low|medium|high", "suggestion": "how to improve"}
  ],
  "suggestions": [
    {"title": "title", "description": "description", "example": "example prompt"}
  ]
}`;

    try {
      // Queue CLI execution
      const result = await executeCliTool({
        tool,
        prompt: analysisPrompt,
        mode: 'analysis',
        timeout: 120000
      });

      // Try to parse JSON from response
      let insights = { patterns: [], suggestions: [] };
      if (result.stdout) {
        let outputText = result.stdout;

        // Strip markdown code blocks if present
        const codeBlockMatch = outputText.match(/```(?:json)?\s*([\s\S]*?)```/);
        if (codeBlockMatch) {
          outputText = codeBlockMatch[1].trim();
        }

        // Find JSON object in the response
        const jsonMatch = outputText.match(/\{[\s\S]*\}/);
        if (jsonMatch) {
          try {
            insights = JSON.parse(jsonMatch[0]);
            // Ensure arrays exist
            if (!Array.isArray(insights.patterns)) insights.patterns = [];
            if (!Array.isArray(insights.suggestions)) insights.suggestions = [];
          } catch (e) {
            console.error('[insights/analyze] JSON parse error:', e);
            // Return raw output if JSON parse fails
            insights = {
              patterns: [{ type: 'raw_analysis', description: result.stdout.substring(0, 500), occurrences: 1, severity: 'low', suggestion: '' }],
              suggestions: []
            };
          }
        } else {
          // No JSON found, wrap raw output
          insights = {
            patterns: [{ type: 'raw_analysis', description: result.stdout.substring(0, 500), occurrences: 1, severity: 'low', suggestion: '' }],
            suggestions: []
          };
        }
      }

      return {
        success: true,
        insights,
        tool,
        executionId: result.execution.id
      };
    } catch (error: unknown) {
      return { error: (error as Error).message, status: 500 };
    }
  });
  return;
}
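For reference, this analysis endpoint would be invoked roughly as follows. A sketch only: the host, port, and sample prompt objects are assumptions, and the response shape mirrors what the handler above returns.

```typescript
// Hypothetical client call for /api/memory/insights/analyze (illustrative only)
const analysis = await fetch('http://localhost:3000/api/memory/insights/analyze', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
    path: 'D:/Claude/dms3',   // project path (example)
    tool: 'gemini',           // or 'qwen' / 'codex'
    lang: 'en',
    prompts: [
      { intent: 'fix', text: 'Fix the JSON parse error in session-content-parser.ts when a line is empty' }
    ]
  })
});
const { success, insights, executionId } = await analysis.json();
// insights.patterns / insights.suggestions follow the JSON schema requested in analysisPrompt
```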

// API: Memory Module - Get conversations index
if (pathname === '/api/memory/conversations') {
  const projectPath = url.searchParams.get('path') || initialPath;
  const project = url.searchParams.get('project') || null;
  const limit = parseInt(url.searchParams.get('limit') || '20', 10);

  try {
    const memoryStore = getMemoryStore(projectPath);

    let conversations;
    if (project) {
      const stmt = memoryStore['db'].prepare(`
        SELECT * FROM conversations
        WHERE project_name = ?
        ORDER BY updated_at DESC
        LIMIT ?
      `);
      conversations = stmt.all(project, limit);
    } else {
      conversations = memoryStore.getConversations(limit);
    }

    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ conversations }));
  } catch (error: unknown) {
    res.writeHead(500, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: (error as Error).message }));
  }
  return;
}

// API: Memory Module - Replay conversation
if (pathname.startsWith('/api/memory/replay/')) {
  const conversationId = pathname.replace('/api/memory/replay/', '');
  const projectPath = url.searchParams.get('path') || initialPath;

  if (!conversationId) {
    res.writeHead(400, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: 'Conversation ID is required' }));
    return;
  }

  try {
    const memoryStore = getMemoryStore(projectPath);
    const conversation = memoryStore.getConversation(conversationId);

    if (!conversation) {
      res.writeHead(404, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ error: 'Conversation not found' }));
      return;
    }

    const messages = memoryStore.getMessages(conversationId);

    // Enhance messages with tool calls
    const messagesWithTools = [];
    for (const message of messages) {
      const toolCalls = message.id ? memoryStore.getToolCalls(message.id) : [];
      messagesWithTools.push({
        ...message,
        tool_calls: toolCalls
      });
    }

    res.writeHead(200, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({
      conversation,
      messages: messagesWithTools
    }));
  } catch (error: unknown) {
    res.writeHead(500, { 'Content-Type': 'application/json' });
    res.end(JSON.stringify({ error: (error as Error).message }));
  }
  return;
}

// API: Memory Module - Import history (async task)
if (pathname === '/api/memory/import' && req.method === 'POST') {
  handlePostRequest(req, res, async (body) => {
    const { source = 'all', project, path: projectPath } = body;
    const basePath = projectPath || initialPath;

    // Generate task ID for async operation
    const taskId = `import-${Date.now()}`;

    // TODO: Implement actual history import using HistoryImporter
    // For now, return a placeholder response
    console.log(`[Memory] Import task ${taskId} started: source=${source}, project=${project}`);

    return {
      success: true,
      taskId,
      message: 'Import task started (not yet implemented)',
      source,
      project
    };
  });
  return;
}
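The TODO above could eventually be filled in with the HistoryImporter class this commit adds. A rough sketch of that wiring follows; the import path, the memory database location, and the 'project' vs. 'all' dispatch are assumptions, not part of this change.

```typescript
// Hypothetical completion of the /api/memory/import handler (illustrative only)
import { HistoryImporter } from '../core/history-importer'; // relative path assumed

const importer = new HistoryImporter(join(basePath, '.ccw', 'memory.db')); // DB location assumed
try {
  const result = source === 'project' && project
    ? await importer.importProjectSessions(project)  // all session JSONL files for one project
    : await importer.importGlobalHistory();          // incremental import of ~/.claude/history.jsonl
  console.log(`[Memory] Import task ${taskId}: imported=${result.imported}, skipped=${result.skipped}, errors=${result.errors}`);
} finally {
  importer.close();
}
```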

// API: Update CLAUDE.md using CLI tools (Explorer view)
if (pathname === '/api/update-claude-md' && req.method === 'POST') {
  handlePostRequest(req, res, async (body) => {
@@ -1520,6 +2148,65 @@ window.INITIAL_PATH = '${normalizePathForDisplay(initialPath).replace(/\\/g, '/'
// MCP Configuration Functions
// ========================================

/**
 * Derive prompt intent from text content
 */
function derivePromptIntent(text: string): string {
  const lower = text.toLowerCase();

  // Implementation/coding patterns
  if (/实现|implement|create|add|build|write|develop|make/.test(lower)) return 'implement';
  if (/修复|fix|bug|error|issue|problem|解决/.test(lower)) return 'fix';
  if (/重构|refactor|optimize|improve|clean/.test(lower)) return 'refactor';
  if (/测试|test|spec|coverage/.test(lower)) return 'test';

  // Analysis patterns
  if (/分析|analyze|review|check|examine|audit/.test(lower)) return 'analyze';
  if (/解释|explain|what|how|why|understand/.test(lower)) return 'explain';
  if (/搜索|search|find|look|where|locate/.test(lower)) return 'search';

  // Documentation patterns
  if (/文档|document|readme|comment|注释/.test(lower)) return 'document';

  // Planning patterns
  if (/计划|plan|design|architect|strategy/.test(lower)) return 'plan';

  // Configuration patterns
  if (/配置|config|setup|install|设置/.test(lower)) return 'configure';

  // Default
  return 'general';
}

/**
 * Calculate prompt quality score (0-100)
 */
function calculateQualityScore(text: string): number {
  let score = 50; // Base score

  // Length factors
  const length = text.length;
  if (length > 50 && length < 500) score += 15;
  else if (length >= 500 && length < 1000) score += 10;
  else if (length < 20) score -= 20;

  // Specificity indicators
  if (/file|path|function|class|method|variable/i.test(text)) score += 10;
  if (/src\/|\.ts|\.js|\.py|\.go/i.test(text)) score += 10;

  // Context indicators
  if (/when|after|before|because|since/i.test(text)) score += 5;

  // Action clarity
  if (/please|要|请|帮|help/i.test(text)) score += 5;

  // Structure indicators
  if (/\d+\.|•|-\s/.test(text)) score += 10; // Lists

  // Clamp to the 0-100 range
  return Math.min(100, Math.max(0, score));
}
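A quick illustration of how these two helpers classify a prompt; the sample text is invented, and the score shown is simply what the heuristics above produce for it.

```typescript
// Illustrative use of derivePromptIntent / calculateQualityScore (not part of the diff)
const sample = 'Fix the JSON parse error in src/core/session-content-parser.ts when lines are empty';
console.log(derivePromptIntent(sample));    // 'fix' — matched by the fix/bug/error pattern
console.log(calculateQualityScore(sample)); // 80 — +15 length, +10 src/ path and .ts, +5 'when' context
```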

/**
 * Safely read and parse JSON file
 * @param {string} filePath