Add comprehensive command and skill reference documentation in Chinese

- Created a new document for command and skill references, detailing orchestrator commands, workflow session commands, issue workflow commands, IDAW commands, with-file workflows, cycle workflows, CLI commands, memory commands, team skills, workflow skills, utility skills, and Codex capabilities.
- Added a comparison table for workflows, outlining their best uses, levels, self-containment, and automatic chaining behavior.
This commit is contained in:
catlog22
2026-03-02 17:41:40 +08:00
parent 3bb4a821de
commit b780734649
12 changed files with 2215 additions and 205 deletions

View File

@@ -8,32 +8,14 @@ import 'xterm/css/xterm.css'
import { loadMessagesForLocale, getInitialLocale } from './lib/i18n'
import { logWebVitals } from './lib/webVitals'
/**
* Initialize CSRF token by fetching from backend
* This ensures the CSRF cookie is set before any mutating API calls
*/
/**
 * Initialize CSRF protection before any mutating API call.
 * Performs a GET against the backend token endpoint, which sets the
 * XSRF-TOKEN cookie as a side effect. Errors are logged, never thrown,
 * so a failed fetch cannot block application startup.
 */
async function initCsrfToken() {
  const tokenEndpoint = '/api/csrf-token'
  try {
    await fetch(tokenEndpoint, { method: 'GET', credentials: 'same-origin' })
  } catch (error) {
    console.error('Failed to initialize CSRF token:', error)
  }
}
async function bootstrapApplication() {
const rootElement = document.getElementById('root')
if (!rootElement) throw new Error('Failed to find the root element')
// Parallelize CSRF token fetch and locale detection (independent operations)
const [, locale] = await Promise.all([
initCsrfToken(),
getInitialLocale()
])
// CSRF token initialization is deferred to first mutating request
// This eliminates network RTT from app startup path
// See: ccw/frontend/src/lib/api.ts - fetchApi handles lazy token fetch
const locale = await getInitialLocale()
// Load only the active locale's messages (lazy load secondary on demand)
const messages = await loadMessagesForLocale(locale)

View File

@@ -1093,7 +1093,7 @@ export async function handleCliRoutes(ctx: RouteContext): Promise<boolean> {
return { error: 'Execution not found', status: 404 };
}
const review = historyStore.saveReview({
const review = await historyStore.saveReview({
execution_id: executionId,
status,
rating,

View File

@@ -589,7 +589,7 @@ Return ONLY valid JSON in this exact format (no markdown, no code blocks, just p
const storeModule = await import('../../tools/cli-history-store.js');
const store = storeModule.getHistoryStore(projectPath);
const insightId = `insight-${Date.now()}`;
store.saveInsight({
await store.saveInsight({
id: insightId,
tool,
promptCount: prompts.length,

View File

@@ -24,6 +24,212 @@ export const wsClients = new Set<Duplex>();
// Track message counts per client for rate limiting
export const wsClientMessageCounts = new Map<Duplex, { count: number; resetTime: number }>();
/**
* Universal broadcast throttling system
* Reduces WebSocket traffic by deduplicating and rate-limiting broadcast messages
*/
/** Per-type throttle bookkeeping: when we last sent, and the latest payload seen. */
interface ThrottleEntry {
  lastSend: number;
  pendingData: unknown;
}

type ThrottleCategory = 'state_update' | 'memory_cpu' | 'log_output' | 'immediate';

/**
 * Map of message type to throttle configuration.
 * Types not listed here default to immediate delivery (see getThrottleConfig).
 * An interval of 0 means "never throttle".
 */
const THROTTLE_CONFIG = new Map<string, { interval: number; category: ThrottleCategory }>([
  // State updates - high frequency, low value when duplicated
  ['LOOP_STATE_UPDATE', { interval: 1000, category: 'state_update' }],
  ['ORCHESTRATOR_STATE_UPDATE', { interval: 1000, category: 'state_update' }],
  ['COORDINATOR_STATE_UPDATE', { interval: 1000, category: 'state_update' }],
  ['QUEUE_SCHEDULER_STATE_UPDATE', { interval: 1000, category: 'state_update' }],
  // Memory/CPU updates - medium frequency
  ['LOOP_STEP_COMPLETED', { interval: 500, category: 'memory_cpu' }],
  ['ORCHESTRATOR_NODE_COMPLETED', { interval: 500, category: 'memory_cpu' }],
  ['COORDINATOR_COMMAND_COMPLETED', { interval: 500, category: 'memory_cpu' }],
  ['QUEUE_ITEM_UPDATED', { interval: 500, category: 'memory_cpu' }],
  // Log/output - higher frequency allowed for real-time streaming
  ['LOOP_LOG_ENTRY', { interval: 200, category: 'log_output' }],
  ['ORCHESTRATOR_LOG', { interval: 200, category: 'log_output' }],
  ['COORDINATOR_LOG_ENTRY', { interval: 200, category: 'log_output' }],
  // Item added/removed and lifecycle events - send immediately
  ['QUEUE_ITEM_ADDED', { interval: 0, category: 'immediate' }],
  ['QUEUE_ITEM_REMOVED', { interval: 0, category: 'immediate' }],
  ['QUEUE_SCHEDULER_CONFIG_UPDATED', { interval: 0, category: 'immediate' }],
  ['ORCHESTRATOR_NODE_STARTED', { interval: 0, category: 'immediate' }],
  ['ORCHESTRATOR_NODE_FAILED', { interval: 0, category: 'immediate' }],
  ['COORDINATOR_COMMAND_STARTED', { interval: 0, category: 'immediate' }],
  ['COORDINATOR_COMMAND_FAILED', { interval: 0, category: 'immediate' }],
  ['COORDINATOR_QUESTION_ASKED', { interval: 0, category: 'immediate' }],
  ['COORDINATOR_ANSWER_RECEIVED', { interval: 0, category: 'immediate' }],
  // Previously added via `as any` casts, a filter workaround, and a separate
  // .set() call; a single plain entry is equivalent and type-checks cleanly.
  ['LOOP_COMPLETED', { interval: 0, category: 'immediate' }],
]);
/** Per-message-type throttle tracking, keyed by the message's `type` string */
const throttleState = new Map<string, ThrottleEntry>();
/**
 * Counters for broadcast optimization.
 * sent: messages actually written to clients;
 * throttled: distinct payloads held back within a throttle window;
 * deduped: identical payloads dropped within a throttle window.
 */
export const broadcastMetrics = {
sent: 0,
throttled: 0,
deduped: 0,
};
/**
 * Resolve the throttle settings for a message type.
 * Types absent from THROTTLE_CONFIG fall back to immediate delivery.
 */
function getThrottleConfig(messageType: string): { interval: number; category: ThrottleCategory } {
  const configured = THROTTLE_CONFIG.get(messageType);
  if (configured !== undefined) {
    return configured;
  }
  return { interval: 0, category: 'immediate' };
}
/**
 * Serialize message data into a canonical string for duplicate detection.
 * Object keys are sorted at every nesting level so logically-equal payloads
 * always compare equal regardless of property insertion order.
 *
 * Fix: the previous form `JSON.stringify(data, Object.keys(data).sort())`
 * passed an ARRAY replacer, which filters property names at all depths —
 * nested keys missing from the top level were silently dropped, so distinct
 * payloads could serialize identically and be wrongly deduplicated.
 */
function serializeMessage(data: unknown): string {
  if (typeof data === 'string') return data;
  if (typeof data === 'object' && data !== null) {
    // Function replacer: rebuild each plain object with sorted keys.
    const sortedCopy = (value: any): any => {
      if (value !== null && typeof value === 'object' && !Array.isArray(value)) {
        const out: Record<string, unknown> = {};
        for (const key of Object.keys(value).sort()) {
          out[key] = value[key];
        }
        return out;
      }
      return value;
    };
    return JSON.stringify(data, (_key, value) => sortedCopy(value));
  }
  return String(data);
}
/**
 * Encode arbitrary data as a single unmasked WebSocket text frame
 * (FIN + opcode 0x1). The payload is the JSON serialization of `data`;
 * the length field uses the 7-bit / 16-bit / 64-bit encoding mandated
 * by the WebSocket base framing protocol.
 */
export function createWebSocketFrame(data: unknown): Buffer {
  const payload = Buffer.from(JSON.stringify(data), 'utf8');
  const len = payload.length;

  let header: Buffer;
  if (len <= 125) {
    // Length fits directly in the second header byte
    header = Buffer.from([0x81, len]);
  } else if (len <= 65535) {
    // 126 marker followed by 16-bit big-endian length
    header = Buffer.alloc(4);
    header[0] = 0x81;
    header[1] = 126;
    header.writeUInt16BE(len, 2);
  } else {
    // 127 marker followed by 64-bit big-endian length
    header = Buffer.alloc(10);
    header[0] = 0x81;
    header[1] = 127;
    header.writeBigUInt64BE(BigInt(len), 2);
  }
  return Buffer.concat([header, payload]);
}
/**
 * Send an already-encoded frame to every connected client.
 * A client whose stream write throws (e.g. closed socket) is dropped
 * from the client set. Deleting from a Set during for..of is safe in JS.
 */
function writeFrameToAll(frame: Buffer): void {
  for (const client of wsClients) {
    try {
      client.write(frame);
    } catch (e) {
      wsClients.delete(client);
    }
  }
}

/**
 * Broadcast message to all connected WebSocket clients with universal throttling
 * - Deduplicates identical messages within throttle window
 * - Rate-limits by message type with adaptive intervals
 * - Preserves message ordering within each type
 *
 * Fix: the send loop was copy-pasted three times; it is now a single
 * private helper (writeFrameToAll).
 */
export function broadcastToClients(data: unknown): void {
  const eventType =
    typeof data === 'object' && data !== null && 'type' in data ? (data as { type?: unknown }).type : undefined;

  if (!eventType || typeof eventType !== 'string') {
    // Unknown message type - send immediately, not counted in metrics
    writeFrameToAll(createWebSocketFrame(data));
    console.log(`[WS] Broadcast to ${wsClients.size} clients: unknown type`);
    return;
  }

  const config = getThrottleConfig(eventType);
  const now = Date.now();
  const state = throttleState.get(eventType);

  if (config.interval === 0) {
    // Immediate category - send without throttling
    writeFrameToAll(createWebSocketFrame(data));
    broadcastMetrics.sent++;
    throttleState.set(eventType, { lastSend: now, pendingData: data });
    console.log(`[WS] Broadcast to ${wsClients.size} clients: ${eventType} (immediate)`);
    return;
  }

  // Inside the throttle window: dedupe identical payloads, park new ones
  const currentDataHash = serializeMessage(data);
  if (state) {
    const timeSinceLastSend = now - state.lastSend;
    if (timeSinceLastSend < config.interval) {
      const pendingDataHash = serializeMessage(state.pendingData);
      if (currentDataHash === pendingDataHash) {
        // Duplicate message within the window - drop it
        broadcastMetrics.deduped++;
        console.log(`[WS] Throttled duplicate ${eventType} (${timeSinceLastSend}ms since last)`);
        return;
      }
      // Different data but still within the window - remember it as pending.
      // NOTE(review): pendingData is never flushed by a timer; it is only
      // superseded/sent when later traffic of this type arrives — confirm
      // that losing a trailing update is acceptable.
      throttleState.set(eventType, { lastSend: state.lastSend, pendingData: data });
      broadcastMetrics.throttled++;
      console.log(`[WS] Throttled ${eventType} (${timeSinceLastSend}ms since last, pending updated)`);
      return;
    }
  }

  // Outside the throttle window (or first message of this type) - send now
  writeFrameToAll(createWebSocketFrame(data));
  broadcastMetrics.sent++;
  throttleState.set(eventType, { lastSend: now, pendingData: data });
  console.log(`[WS] Broadcast to ${wsClients.size} clients: ${eventType}`);
}
/**
 * Get broadcast throttling metrics.
 * Returns a defensive copy so callers cannot mutate the live counters.
 */
export function getBroadcastMetrics(): Readonly<typeof broadcastMetrics> {
  const { sent, throttled, deduped } = broadcastMetrics;
  return { sent, throttled, deduped };
}
/**
 * Reset broadcast throttling metrics (for testing/monitoring).
 * Zeroes the counters in place so existing references to the
 * exported object stay valid.
 */
export function resetBroadcastMetrics(): void {
  Object.assign(broadcastMetrics, { sent: 0, throttled: 0, deduped: 0 });
}
/**
* Check if a new WebSocket connection should be accepted
* Returns true if connection allowed, false if limit reached
@@ -387,25 +593,6 @@ export function createWebSocketFrame(data: unknown): Buffer {
return frame;
}
/**
 * Broadcast message to all connected WebSocket clients.
 * Legacy path: sends every message immediately with no throttling or
 * deduplication. Clients whose write throws are removed from the set.
 */
export function broadcastToClients(data: unknown): void {
const frame = createWebSocketFrame(data);
for (const client of wsClients) {
try {
client.write(frame);
} catch (e) {
// Write failed (likely a closed socket) - drop the client
wsClients.delete(client);
}
}
// Best-effort extraction of the message `type`, used only for logging
const eventType =
typeof data === 'object' && data !== null && 'type' in data ? (data as { type?: unknown }).type : undefined;
console.log(`[WS] Broadcast to ${wsClients.size} clients:`, eventType);
}
/**
* Extract session ID from file path
*/
@@ -435,12 +622,9 @@ export function extractSessionIdFromPath(filePath: string): string | null {
}
/**
* Loop-specific broadcast with throttling
* Throttles LOOP_STATE_UPDATE messages to avoid flooding clients
* Loop broadcast types (without timestamp - added automatically)
* Throttling is handled universally in broadcastToClients
*/
let lastLoopBroadcast = 0;
const LOOP_BROADCAST_THROTTLE = 1000; // 1 second
export type LoopMessage =
| Omit<LoopStateUpdateMessage, 'timestamp'>
| Omit<LoopStepCompletedMessage, 'timestamp'>
@@ -448,18 +632,10 @@ export type LoopMessage =
| Omit<LoopLogEntryMessage, 'timestamp'>;
/**
* Broadcast loop state update with throttling
* Broadcast loop update with automatic throttling
* Note: Throttling is now handled universally in broadcastToClients
*/
export function broadcastLoopUpdate(message: LoopMessage): void {
const now = Date.now();
// Throttle LOOP_STATE_UPDATE to reduce WebSocket traffic
if (message.type === 'LOOP_STATE_UPDATE' && now - lastLoopBroadcast < LOOP_BROADCAST_THROTTLE) {
return;
}
lastLoopBroadcast = now;
broadcastToClients({
...message,
timestamp: new Date().toISOString()
@@ -467,8 +643,8 @@ export function broadcastLoopUpdate(message: LoopMessage): void {
}
/**
* Broadcast loop log entry (no throttling)
* Used for streaming real-time logs to Dashboard
* Broadcast loop log entry
* Note: Throttling is now handled universally in broadcastToClients
*/
export function broadcastLoopLog(loop_id: string, step_id: string, line: string): void {
broadcastToClients({
@@ -482,6 +658,7 @@ export function broadcastLoopLog(loop_id: string, step_id: string, line: string)
/**
* Union type for Orchestrator messages (without timestamp - added automatically)
* Throttling is handled universally in broadcastToClients
*/
export type OrchestratorMessage =
| Omit<OrchestratorStateUpdateMessage, 'timestamp'>
@@ -491,29 +668,10 @@ export type OrchestratorMessage =
| Omit<OrchestratorLogMessage, 'timestamp'>;
/**
* Orchestrator-specific broadcast with throttling
* Throttles ORCHESTRATOR_STATE_UPDATE messages to avoid flooding clients
*/
let lastOrchestratorBroadcast = 0;
const ORCHESTRATOR_BROADCAST_THROTTLE = 1000; // 1 second
/**
* Broadcast orchestrator update with throttling
* STATE_UPDATE messages are throttled to 1 per second
* Other message types are sent immediately
* Broadcast orchestrator update with automatic throttling
* Note: Throttling is now handled universally in broadcastToClients
*/
export function broadcastOrchestratorUpdate(message: OrchestratorMessage): void {
const now = Date.now();
// Throttle ORCHESTRATOR_STATE_UPDATE to reduce WebSocket traffic
if (message.type === 'ORCHESTRATOR_STATE_UPDATE' && now - lastOrchestratorBroadcast < ORCHESTRATOR_BROADCAST_THROTTLE) {
return;
}
if (message.type === 'ORCHESTRATOR_STATE_UPDATE') {
lastOrchestratorBroadcast = now;
}
broadcastToClients({
...message,
timestamp: new Date().toISOString()
@@ -521,8 +679,8 @@ export function broadcastOrchestratorUpdate(message: OrchestratorMessage): void
}
/**
* Broadcast orchestrator log entry (no throttling)
* Used for streaming real-time execution logs to Dashboard
* Broadcast orchestrator log entry
* Note: Throttling is now handled universally in broadcastToClients
*/
export function broadcastOrchestratorLog(execId: string, log: Omit<ExecutionLog, 'timestamp'>): void {
broadcastToClients({
@@ -639,6 +797,7 @@ export interface CoordinatorAnswerReceivedMessage {
/**
* Union type for Coordinator messages (without timestamp - added automatically)
* Throttling is handled universally in broadcastToClients
*/
export type CoordinatorMessage =
| Omit<CoordinatorStateUpdateMessage, 'timestamp'>
@@ -650,29 +809,10 @@ export type CoordinatorMessage =
| Omit<CoordinatorAnswerReceivedMessage, 'timestamp'>;
/**
* Coordinator-specific broadcast with throttling
* Throttles COORDINATOR_STATE_UPDATE messages to avoid flooding clients
*/
let lastCoordinatorBroadcast = 0;
const COORDINATOR_BROADCAST_THROTTLE = 1000; // 1 second
/**
* Broadcast coordinator update with throttling
* STATE_UPDATE messages are throttled to 1 per second
* Other message types are sent immediately
* Broadcast coordinator update with automatic throttling
* Note: Throttling is now handled universally in broadcastToClients
*/
export function broadcastCoordinatorUpdate(message: CoordinatorMessage): void {
const now = Date.now();
// Throttle COORDINATOR_STATE_UPDATE to reduce WebSocket traffic
if (message.type === 'COORDINATOR_STATE_UPDATE' && now - lastCoordinatorBroadcast < COORDINATOR_BROADCAST_THROTTLE) {
return;
}
if (message.type === 'COORDINATOR_STATE_UPDATE') {
lastCoordinatorBroadcast = now;
}
broadcastToClients({
...message,
timestamp: new Date().toISOString()
@@ -680,8 +820,8 @@ export function broadcastCoordinatorUpdate(message: CoordinatorMessage): void {
}
/**
* Broadcast coordinator log entry (no throttling)
* Used for streaming real-time coordinator logs to Dashboard
* Broadcast coordinator log entry
* Note: Throttling is now handled universally in broadcastToClients
*/
export function broadcastCoordinatorLog(
executionId: string,
@@ -715,6 +855,7 @@ export type {
/**
* Union type for Queue messages (without timestamp - added automatically)
* Throttling is handled universally in broadcastToClients
*/
export type QueueMessage =
| Omit<QueueSchedulerStateUpdateMessage, 'timestamp'>
@@ -724,29 +865,10 @@ export type QueueMessage =
| Omit<QueueSchedulerConfigUpdatedMessage, 'timestamp'>;
/**
* Queue-specific broadcast with throttling
* Throttles QUEUE_SCHEDULER_STATE_UPDATE messages to avoid flooding clients
*/
let lastQueueBroadcast = 0;
const QUEUE_BROADCAST_THROTTLE = 1000; // 1 second
/**
* Broadcast queue update with throttling
* STATE_UPDATE messages are throttled to 1 per second
* Other message types are sent immediately
* Broadcast queue update with automatic throttling
* Note: Throttling is now handled universally in broadcastToClients
*/
export function broadcastQueueUpdate(message: QueueMessage): void {
const now = Date.now();
// Throttle QUEUE_SCHEDULER_STATE_UPDATE to reduce WebSocket traffic
if (message.type === 'QUEUE_SCHEDULER_STATE_UPDATE' && now - lastQueueBroadcast < QUEUE_BROADCAST_THROTTLE) {
return;
}
if (message.type === 'QUEUE_SCHEDULER_STATE_UPDATE') {
lastQueueBroadcast = now;
}
broadcastToClients({
...message,
timestamp: new Date().toISOString()

View File

@@ -128,7 +128,7 @@ export function ensureHistoryDir(baseDir: string): string {
*/
/**
 * Persist a conversation record via the async SQLite store.
 * The save is awaited so callers can observe (and handle) failures.
 *
 * Fix: the body contained both an un-awaited and an awaited call to
 * store.saveConversation, writing the record twice and leaving the first
 * call's rejection unhandled; only the awaited call is kept.
 */
async function saveConversationAsync(baseDir: string, conversation: ConversationRecord): Promise<void> {
  const store = await getSqliteStore(baseDir);
  await store.saveConversation(conversation);
}
/**
@@ -138,7 +138,10 @@ async function saveConversationAsync(baseDir: string, conversation: Conversation
export function saveConversation(baseDir: string, conversation: ConversationRecord): void {
try {
const store = getSqliteStoreSync(baseDir);
store.saveConversation(conversation);
// Fire and forget - don't block on async save in sync context
store.saveConversation(conversation).catch(err => {
console.error('[CLI Executor] Failed to save conversation:', err.message);
});
} catch {
// If sync not available, queue for async save
saveConversationAsync(baseDir, conversation).catch(err => {
@@ -399,7 +402,7 @@ export function getExecutionDetail(baseDir: string, executionId: string): Execut
*/
/**
 * Delete a single execution record from the SQLite store.
 *
 * Fix: the body contained two consecutive return statements (one without
 * await, one with), making the second unreachable; only the awaited
 * return is kept — `return await` keeps this frame in rejection stack traces.
 *
 * @returns the store's { success, error? } result
 */
export async function deleteExecutionAsync(baseDir: string, executionId: string): Promise<{ success: boolean; error?: string }> {
  const store = await getSqliteStore(baseDir);
  return await store.deleteConversation(executionId);
}
/**
@@ -408,7 +411,11 @@ export async function deleteExecutionAsync(baseDir: string, executionId: string)
export function deleteExecution(baseDir: string, executionId: string): { success: boolean; error?: string } {
try {
const store = getSqliteStoreSync(baseDir);
return store.deleteConversation(executionId);
// Fire and forget - don't block on async delete in sync context
store.deleteConversation(executionId).catch(err => {
console.error('[CLI Executor] Failed to delete execution:', err.message);
});
return { success: true }; // Optimistic response
} catch {
return { success: false, error: 'SQLite store not initialized' };
}
@@ -424,7 +431,7 @@ export async function batchDeleteExecutionsAsync(baseDir: string, ids: string[])
errors?: string[];
}> {
const store = await getSqliteStore(baseDir);
const result = store.batchDelete(ids);
const result = await store.batchDelete(ids);
return { ...result, total: ids.length };
}

View File

@@ -121,7 +121,8 @@ export class CliHistoryStore {
this.db = new Database(this.dbPath);
this.db.pragma('journal_mode = WAL');
this.db.pragma('synchronous = NORMAL');
this.db.pragma('busy_timeout = 5000'); // Wait up to 5 seconds for locks
this.db.pragma('busy_timeout = 10000'); // Wait up to 10 seconds for locks (increased for write-heavy scenarios)
this.db.pragma('wal_autocheckpoint = 1000'); // Optimize WAL checkpointing
this.initSchema();
this.migrateFromJson(historyDir);
@@ -386,12 +387,13 @@ export class CliHistoryStore {
}
/**
* Execute a database operation with retry logic for SQLITE_BUSY errors
* Execute a database operation with retry logic for SQLITE_BUSY errors (async, non-blocking)
* @param operation - Function to execute
* @param maxRetries - Maximum retry attempts (default: 3)
* @param baseDelay - Base delay in ms for exponential backoff (default: 100)
*/
private withRetry<T>(operation: () => T, maxRetries = 3, baseDelay = 100): T {
private async withRetry<T>(operation: () => T, maxRetries = 3, baseDelay = 100): Promise<T> {
const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));
let lastError: Error | null = null;
for (let attempt = 0; attempt <= maxRetries; attempt++) {
@@ -405,10 +407,8 @@ export class CliHistoryStore {
if (attempt < maxRetries) {
// Exponential backoff: 100ms, 200ms, 400ms
const delay = baseDelay * Math.pow(2, attempt);
// Sync sleep using Atomics (works in Node.js)
const sharedBuffer = new SharedArrayBuffer(4);
const sharedArray = new Int32Array(sharedBuffer);
Atomics.wait(sharedArray, 0, 0, delay);
// Async non-blocking sleep
await sleep(delay);
}
} else {
// Non-BUSY error, throw immediately
@@ -421,7 +421,7 @@ export class CliHistoryStore {
}
/**
* Migrate existing JSON files to SQLite
* Migrate existing JSON files to SQLite (async, non-blocking)
*/
private migrateFromJson(historyDir: string): void {
const migrationMarker = join(historyDir, '.migrated');
@@ -429,41 +429,50 @@ export class CliHistoryStore {
return; // Already migrated
}
// Find all date directories
const dateDirs = readdirSync(historyDir).filter(d => {
const dirPath = join(historyDir, d);
return statSync(dirPath).isDirectory() && /^\d{4}-\d{2}-\d{2}$/.test(d);
});
// Fire-and-forget async migration
(async () => {
try {
// Find all date directories
const dateDirs = readdirSync(historyDir).filter(d => {
const dirPath = join(historyDir, d);
return statSync(dirPath).isDirectory() && /^\d{4}-\d{2}-\d{2}$/.test(d);
});
let migratedCount = 0;
let migratedCount = 0;
for (const dateDir of dateDirs) {
const dirPath = join(historyDir, dateDir);
const files = readdirSync(dirPath).filter(f => f.endsWith('.json'));
for (const dateDir of dateDirs) {
const dirPath = join(historyDir, dateDir);
const files = readdirSync(dirPath).filter(f => f.endsWith('.json'));
for (const file of files) {
try {
const filePath = join(dirPath, file);
const data = JSON.parse(readFileSync(filePath, 'utf8'));
for (const file of files) {
try {
const filePath = join(dirPath, file);
const data = JSON.parse(readFileSync(filePath, 'utf8'));
// Convert to conversation record if legacy format
const conversation = this.normalizeRecord(data);
this.saveConversation(conversation);
migratedCount++;
// Convert to conversation record if legacy format
const conversation = this.normalizeRecord(data);
await this.saveConversation(conversation);
migratedCount++;
// Optionally delete the JSON file after migration
// unlinkSync(filePath);
} catch (err) {
console.error(`Failed to migrate ${file}:`, (err as Error).message);
// Optionally delete the JSON file after migration
// unlinkSync(filePath);
} catch (err) {
console.error(`Failed to migrate ${file}:`, (err as Error).message);
}
}
}
}
}
// Create migration marker
if (migratedCount > 0) {
require('fs').writeFileSync(migrationMarker, new Date().toISOString());
console.log(`[CLI History] Migrated ${migratedCount} records to SQLite`);
}
// Create migration marker
if (migratedCount > 0) {
require('fs').writeFileSync(migrationMarker, new Date().toISOString());
console.log(`[CLI History] Migrated ${migratedCount} records to SQLite`);
}
} catch (err) {
console.error('[CLI History] Migration failed:', (err as Error).message);
}
})().catch(err => {
console.error('[CLI History] Migration error:', (err as Error).message);
});
}
/**
@@ -499,9 +508,9 @@ export class CliHistoryStore {
}
/**
* Save or update a conversation
* Save or update a conversation (async for non-blocking retry)
*/
saveConversation(conversation: ConversationRecord): void {
async saveConversation(conversation: ConversationRecord): Promise<void> {
// Ensure prompt is a string before calling substring
const lastTurn = conversation.turns.length > 0 ? conversation.turns[conversation.turns.length - 1] : null;
const rawPrompt = lastTurn?.prompt ?? '';
@@ -579,7 +588,7 @@ export class CliHistoryStore {
}
});
this.withRetry(() => transaction());
await this.withRetry(() => transaction());
}
/**
@@ -852,11 +861,11 @@ export class CliHistoryStore {
}
/**
* Delete a conversation
* Delete a conversation (async for non-blocking retry)
*/
deleteConversation(id: string): { success: boolean; error?: string } {
async deleteConversation(id: string): Promise<{ success: boolean; error?: string }> {
try {
const result = this.withRetry(() =>
const result = await this.withRetry(() =>
this.db.prepare('DELETE FROM conversations WHERE id = ?').run(id)
);
return { success: result.changes > 0 };
@@ -866,9 +875,9 @@ export class CliHistoryStore {
}
/**
* Batch delete conversations
* Batch delete conversations (async for non-blocking retry)
*/
batchDelete(ids: string[]): { success: boolean; deleted: number; errors?: string[] } {
async batchDelete(ids: string[]): Promise<{ success: boolean; deleted: number; errors?: string[] }> {
const deleteStmt = this.db.prepare('DELETE FROM conversations WHERE id = ?');
const errors: string[] = [];
let deleted = 0;
@@ -884,7 +893,7 @@ export class CliHistoryStore {
}
});
this.withRetry(() => transaction());
await this.withRetry(() => transaction());
return {
success: true,
@@ -947,9 +956,9 @@ export class CliHistoryStore {
// ========== Native Session Mapping Methods ==========
/**
* Save or update native session mapping
* Save or update native session mapping (async for non-blocking retry)
*/
saveNativeSessionMapping(mapping: NativeSessionMapping): void {
async saveNativeSessionMapping(mapping: NativeSessionMapping): Promise<void> {
const stmt = this.db.prepare(`
INSERT INTO native_session_mapping (ccw_id, tool, native_session_id, native_session_path, project_hash, transaction_id, created_at)
VALUES (@ccw_id, @tool, @native_session_id, @native_session_path, @project_hash, @transaction_id, @created_at)
@@ -960,7 +969,7 @@ export class CliHistoryStore {
transaction_id = @transaction_id
`);
this.withRetry(() => stmt.run({
await this.withRetry(() => stmt.run({
ccw_id: mapping.ccw_id,
tool: mapping.tool,
native_session_id: mapping.native_session_id,
@@ -1144,7 +1153,7 @@ export class CliHistoryStore {
// Persist mapping for future loads (best-effort).
try {
this.saveNativeSessionMapping({
await this.saveNativeSessionMapping({
ccw_id: ccwId,
tool,
native_session_id: best.sessionId,
@@ -1290,9 +1299,9 @@ export class CliHistoryStore {
// ========== Insights Methods ==========
/**
* Save an insights analysis result
* Save an insights analysis result (async for non-blocking retry)
*/
saveInsight(insight: {
async saveInsight(insight: {
id: string;
tool: string;
promptCount: number;
@@ -1301,13 +1310,13 @@ export class CliHistoryStore {
rawOutput?: string;
executionId?: string;
lang?: string;
}): void {
}): Promise<void> {
const stmt = this.db.prepare(`
INSERT OR REPLACE INTO insights (id, created_at, tool, prompt_count, patterns, suggestions, raw_output, execution_id, lang)
VALUES (@id, @created_at, @tool, @prompt_count, @patterns, @suggestions, @raw_output, @execution_id, @lang)
`);
this.withRetry(() => stmt.run({
await this.withRetry(() => stmt.run({
id: insight.id,
created_at: new Date().toISOString(),
tool: insight.tool,
@@ -1391,9 +1400,9 @@ export class CliHistoryStore {
}
/**
* Save or update a review for an execution
* Save or update a review for an execution (async for non-blocking retry)
*/
saveReview(review: Omit<ReviewRecord, 'id' | 'created_at' | 'updated_at'> & { created_at?: string; updated_at?: string }): ReviewRecord {
async saveReview(review: Omit<ReviewRecord, 'id' | 'created_at' | 'updated_at'> & { created_at?: string; updated_at?: string }): Promise<ReviewRecord> {
const now = new Date().toISOString();
const created_at = review.created_at || now;
const updated_at = review.updated_at || now;
@@ -1409,7 +1418,7 @@ export class CliHistoryStore {
updated_at = @updated_at
`);
const result = this.withRetry(() => stmt.run({
const result = await this.withRetry(() => stmt.run({
execution_id: review.execution_id,
status: review.status,
rating: review.rating ?? null,