mirror of
https://github.com/catlog22/Claude-Code-Workflow.git
synced 2026-03-10 17:11:04 +08:00
feat: 添加参数模式不匹配检测功能,提供用户友好的错误信息
fix: 更新数据库字段类型,确保时间戳使用REAL类型 refactor: 优化DeepWiki服务和存储的查询逻辑,支持批量路径处理 refactor: 移除无用的worker_agent字段,简化团队配置
This commit is contained in:
@@ -4,7 +4,6 @@
|
||||
"team_display_name": "Architecture Optimization",
|
||||
"skill_name": "team-arch-opt",
|
||||
"skill_path": ".claude/skills/team-arch-opt/",
|
||||
"worker_agent": "team-worker",
|
||||
"pipeline_type": "Linear with Review-Fix Cycle (Parallel-Capable)",
|
||||
"completion_action": "interactive",
|
||||
"has_inline_discuss": true,
|
||||
|
||||
@@ -27,6 +27,7 @@ export interface DeepWikiSymbol {
|
||||
end_line: number;
|
||||
created_at: number | null;
|
||||
updated_at: number | null;
|
||||
staleness_score: number;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -281,16 +282,19 @@ export class DeepWikiService {
|
||||
staleness_score: number;
|
||||
}>>();
|
||||
|
||||
const stmt = db.prepare(`
|
||||
SELECT name, type, doc_file, anchor, start_line, end_line, staleness_score
|
||||
FROM deepwiki_symbols
|
||||
WHERE source_file = ?
|
||||
ORDER BY start_line
|
||||
`);
|
||||
if (paths.length === 0) {
|
||||
return result;
|
||||
}
|
||||
|
||||
for (const filePath of paths) {
|
||||
const normalizedPath = filePath.replace(/\\/g, '/');
|
||||
const rows = stmt.all(normalizedPath) as Array<{
|
||||
const normalizedPaths = paths.map(p => p.replace(/\\/g, '/'));
|
||||
const placeholders = normalizedPaths.map(() => '?').join(',');
|
||||
const rows = db.prepare(`
|
||||
SELECT source_file, name, type, doc_file, anchor, start_line, end_line, staleness_score
|
||||
FROM deepwiki_symbols
|
||||
WHERE source_file IN (${placeholders})
|
||||
ORDER BY source_file, start_line
|
||||
`).all(...normalizedPaths) as Array<{
|
||||
source_file: string;
|
||||
name: string;
|
||||
type: string;
|
||||
doc_file: string;
|
||||
@@ -300,9 +304,18 @@ export class DeepWikiService {
|
||||
staleness_score: number;
|
||||
}>;
|
||||
|
||||
if (rows.length > 0) {
|
||||
result.set(normalizedPath, rows);
|
||||
}
|
||||
for (const row of rows) {
|
||||
const existing = result.get(row.source_file) || [];
|
||||
existing.push({
|
||||
name: row.name,
|
||||
type: row.type,
|
||||
doc_file: row.doc_file,
|
||||
anchor: row.anchor,
|
||||
start_line: row.start_line,
|
||||
end_line: row.end_line,
|
||||
staleness_score: row.staleness_score,
|
||||
});
|
||||
result.set(row.source_file, existing);
|
||||
}
|
||||
|
||||
return result;
|
||||
@@ -328,17 +341,36 @@ export class DeepWikiService {
|
||||
}
|
||||
|
||||
try {
|
||||
const stmt = db.prepare('SELECT content_hash FROM deepwiki_files WHERE path = ?');
|
||||
const stale: Array<{ path: string; stored_hash: string | null; current_hash: string }> = [];
|
||||
if (files.length === 0) {
|
||||
return stale;
|
||||
}
|
||||
|
||||
// Build lookup: normalizedPath -> original file
|
||||
const lookup = new Map<string, { path: string; hash: string }>();
|
||||
const normalizedPaths: string[] = [];
|
||||
for (const file of files) {
|
||||
const normalizedPath = file.path.replace(/\\/g, '/');
|
||||
const row = stmt.get(normalizedPath) as { content_hash: string } | undefined;
|
||||
const np = file.path.replace(/\\/g, '/');
|
||||
lookup.set(np, file);
|
||||
normalizedPaths.push(np);
|
||||
}
|
||||
|
||||
if (row && row.content_hash !== file.hash) {
|
||||
stale.push({ path: file.path, stored_hash: row.content_hash, current_hash: file.hash });
|
||||
} else if (!row) {
|
||||
const placeholders = normalizedPaths.map(() => '?').join(',');
|
||||
const rows = db.prepare(
|
||||
`SELECT path, content_hash FROM deepwiki_files WHERE path IN (${placeholders})`
|
||||
).all(...normalizedPaths) as Array<{ path: string; content_hash: string }>;
|
||||
|
||||
const stored = new Map<string, string>();
|
||||
for (const row of rows) {
|
||||
stored.set(row.path, row.content_hash);
|
||||
}
|
||||
|
||||
for (const [np, file] of lookup) {
|
||||
const storedHash = stored.get(np);
|
||||
if (storedHash === undefined) {
|
||||
stale.push({ path: file.path, stored_hash: null, current_hash: file.hash });
|
||||
} else if (storedHash !== file.hash) {
|
||||
stale.push({ path: file.path, stored_hash: storedHash, current_hash: file.hash });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -38,20 +38,36 @@ import {
|
||||
// Track all running child processes for cleanup on interruption (multi-process support)
|
||||
const runningChildProcesses = new Set<ChildProcess>();
|
||||
|
||||
// Debug logging for parallel execution testing
|
||||
const DEBUG_SESSION_ID = 'DBG-parallel-ccw-cli-test-2026-03-07';
|
||||
const DEBUG_LOG_PATH = path.join(process.cwd(), '.workflow', '.debug', DEBUG_SESSION_ID, 'debug.log');
|
||||
|
||||
function writeDebugLog(event: string, data: Record<string, any>): void {
|
||||
try {
|
||||
const logEntry = JSON.stringify({ event, ...data, timestamp: new Date().toISOString() }) + '\n';
|
||||
fs.appendFileSync(DEBUG_LOG_PATH, logEntry, 'utf8');
|
||||
} catch (err) {
|
||||
// Silently ignore logging errors to avoid disrupting execution
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Kill all running CLI child processes
|
||||
* Called when parent process receives SIGINT/SIGTERM
|
||||
*/
|
||||
export function killAllCliProcesses(): boolean {
|
||||
if (runningChildProcesses.size === 0) return false;
|
||||
writeDebugLog('KILL_ALL_START', { initial_set_size: runningChildProcesses.size });
|
||||
|
||||
const processesToKill = Array.from(runningChildProcesses);
|
||||
debugLog('KILL', `Killing ${processesToKill.length} child process(es)`, { pids: processesToKill.map(p => p.pid) });
|
||||
writeDebugLog('KILL_ALL_COPY', { pids_to_kill: processesToKill.map(p => p.pid) });
|
||||
|
||||
// 1. SIGTERM for graceful shutdown
|
||||
for (const child of processesToKill) {
|
||||
if (!child.killed) {
|
||||
try { child.kill('SIGTERM'); } catch { /* Ignore kill errors */ }
|
||||
try { child.kill('SIGTERM'); } catch (e: any) { writeDebugLog('KILL_SIGTERM_ERROR', { pid: child.pid, error: e.message }); }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -59,12 +75,13 @@ export function killAllCliProcesses(): boolean {
|
||||
const killTimeout = setTimeout(() => {
|
||||
for (const child of processesToKill) {
|
||||
if (!child.killed) {
|
||||
try { child.kill('SIGKILL'); } catch { /* Ignore kill errors */ }
|
||||
try { child.kill('SIGKILL'); } catch (e: any) { writeDebugLog('KILL_SIGKILL_ERROR', { pid: child.pid, error: e.message }); }
|
||||
}
|
||||
}
|
||||
}, 2000);
|
||||
killTimeout.unref();
|
||||
|
||||
writeDebugLog('KILL_ALL_CLEAR', { set_size_before: runningChildProcesses.size, pids_in_set: Array.from(runningChildProcesses).map(p => p.pid) });
|
||||
runningChildProcesses.clear();
|
||||
return true;
|
||||
}
|
||||
@@ -240,6 +257,7 @@ async function executeClaudeWithSettings(params: ClaudeWithSettingsParams): Prom
|
||||
|
||||
// Track child process for cleanup (multi-process support)
|
||||
runningChildProcesses.add(child);
|
||||
writeDebugLog('PROCESS_ADD', { pid: child.pid, set_size_after: runningChildProcesses.size, function: 'executeClaudeWithSettings' });
|
||||
|
||||
let stdout = '';
|
||||
let stderr = '';
|
||||
@@ -279,6 +297,7 @@ async function executeClaudeWithSettings(params: ClaudeWithSettingsParams): Prom
|
||||
|
||||
child.on('close', (code) => {
|
||||
runningChildProcesses.delete(child);
|
||||
writeDebugLog('PROCESS_DELETE', { pid: child.pid, exit_code: code, set_size_after: runningChildProcesses.size, function: 'executeClaudeWithSettings', handler: 'close' });
|
||||
|
||||
const endTime = Date.now();
|
||||
const duration = endTime - startTime;
|
||||
@@ -319,8 +338,10 @@ async function executeClaudeWithSettings(params: ClaudeWithSettingsParams): Prom
|
||||
|
||||
// Save to history
|
||||
try {
|
||||
writeDebugLog('SAVE_CONVERSATION_START', { conversationId: conversation.id, pid: child.pid, function: 'executeClaudeWithSettings' });
|
||||
saveConversation(workingDir, conversation);
|
||||
} catch (err) {
|
||||
writeDebugLog('SAVE_CONVERSATION_ERROR', { conversationId: conversation.id, pid: child.pid, error: (err as Error).message, stack: (err as Error).stack, function: 'executeClaudeWithSettings' });
|
||||
console.error('[CLI Executor] Failed to save CLI封装 history:', (err as Error).message);
|
||||
}
|
||||
|
||||
@@ -335,6 +356,7 @@ async function executeClaudeWithSettings(params: ClaudeWithSettingsParams): Prom
|
||||
|
||||
child.on('error', (error) => {
|
||||
runningChildProcesses.delete(child);
|
||||
writeDebugLog('PROCESS_DELETE', { pid: child.pid, set_size_after: runningChildProcesses.size, function: 'executeClaudeWithSettings', handler: 'error' });
|
||||
reject(new Error(`Failed to spawn claude: ${error.message}`));
|
||||
});
|
||||
});
|
||||
@@ -997,6 +1019,7 @@ async function executeCliTool(
|
||||
|
||||
// Track child process for cleanup on interruption (multi-process support)
|
||||
runningChildProcesses.add(child);
|
||||
writeDebugLog('PROCESS_ADD', { pid: child.pid, set_size_after: runningChildProcesses.size, function: 'executeCliTool', tool });
|
||||
|
||||
debugLog('SPAWN', `Process spawned`, { pid: child.pid });
|
||||
|
||||
@@ -1048,6 +1071,7 @@ async function executeCliTool(
|
||||
child.on('close', async (code) => {
|
||||
// Remove from running processes
|
||||
runningChildProcesses.delete(child);
|
||||
writeDebugLog('PROCESS_DELETE', { pid: child.pid, exit_code: code, set_size_after: runningChildProcesses.size, function: 'executeCliTool', handler: 'close', tool });
|
||||
|
||||
// Flush remaining buffer from parser
|
||||
const remainingUnits = parser.flush();
|
||||
@@ -1176,9 +1200,11 @@ async function executeCliTool(
|
||||
// Save all source conversations
|
||||
try {
|
||||
for (const conv of savedConversations) {
|
||||
writeDebugLog('SAVE_CONVERSATION_START', { conversationId: conv.id, pid: child.pid, function: 'executeCliTool', context: 'merge-loop', tool });
|
||||
saveConversation(workingDir, conv);
|
||||
}
|
||||
} catch (err) {
|
||||
writeDebugLog('SAVE_CONVERSATION_ERROR', { pid: child.pid, error: (err as Error).message, stack: (err as Error).stack, function: 'executeCliTool', context: 'merge-loop', tool });
|
||||
console.error('[CLI Executor] Failed to save merged histories:', (err as Error).message);
|
||||
}
|
||||
} else if (isMerge && mergeResult && customId) {
|
||||
@@ -1218,8 +1244,10 @@ async function executeCliTool(
|
||||
};
|
||||
// Save merged conversation
|
||||
try {
|
||||
writeDebugLog('SAVE_CONVERSATION_START', { conversationId: conversation.id, pid: child.pid, function: 'executeCliTool', context: 'merge-with-id', tool });
|
||||
saveConversation(workingDir, conversation);
|
||||
} catch (err) {
|
||||
writeDebugLog('SAVE_CONVERSATION_ERROR', { conversationId: conversation.id, pid: child.pid, error: (err as Error).message, stack: (err as Error).stack, function: 'executeCliTool', context: 'merge-with-id', tool });
|
||||
console.error('[CLI Executor] Failed to save merged conversation:', (err as Error).message);
|
||||
}
|
||||
} else {
|
||||
@@ -1249,8 +1277,10 @@ async function executeCliTool(
|
||||
};
|
||||
// Try to save conversation to history
|
||||
try {
|
||||
writeDebugLog('SAVE_CONVERSATION_START', { conversationId: conversation.id, pid: child.pid, function: 'executeCliTool', context: 'normal', tool });
|
||||
saveConversation(workingDir, conversation);
|
||||
} catch (err) {
|
||||
writeDebugLog('SAVE_CONVERSATION_ERROR', { conversationId: conversation.id, pid: child.pid, error: (err as Error).message, stack: (err as Error).stack, function: 'executeCliTool', context: 'normal', tool });
|
||||
// Non-fatal: continue even if history save fails
|
||||
console.error('[CLI Executor] Failed to save history:', (err as Error).message);
|
||||
}
|
||||
@@ -1311,6 +1341,7 @@ async function executeCliTool(
|
||||
child.on('error', (error) => {
|
||||
// Remove from running processes
|
||||
runningChildProcesses.delete(child);
|
||||
writeDebugLog('PROCESS_DELETE', { pid: child.pid, set_size_after: runningChildProcesses.size, function: 'executeCliTool', handler: 'error', tool });
|
||||
|
||||
errorLog('SPAWN', `Failed to spawn process`, error, {
|
||||
tool,
|
||||
|
||||
@@ -573,8 +573,44 @@ interface CompactEditResult {
|
||||
dryRun?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect parameter mode mismatch and provide helpful error message
|
||||
* This helps users understand when they're using wrong parameters for the selected mode
|
||||
*/
|
||||
function detectModeMismatch(params: Record<string, unknown>): string | null {
|
||||
const hasLineParams = ['operation', 'line', 'end_line'].some(p => params[p] !== undefined);
|
||||
const hasUpdateParams = ['oldText', 'newText', 'edits', 'replaceAll'].some(p => params[p] !== undefined);
|
||||
const currentMode = params.mode as string | undefined;
|
||||
|
||||
// User passed line-mode params but mode is not "line"
|
||||
if (hasLineParams && currentMode !== 'line') {
|
||||
if (currentMode === 'update' || currentMode === undefined) {
|
||||
const modeHint = currentMode === undefined ? '(default)' : '';
|
||||
return `Parameter mismatch: detected line-mode parameters (operation/line/end_line) ` +
|
||||
`but mode="${currentMode || 'update'}"${modeHint}. ` +
|
||||
`Add \`mode: "line"\` to use operation/line parameters, ` +
|
||||
`or use oldText/newText/edits for update mode.`;
|
||||
}
|
||||
}
|
||||
|
||||
// User passed update-mode params but mode is "line"
|
||||
if (hasUpdateParams && currentMode === 'line') {
|
||||
return `Parameter mismatch: detected update-mode parameters (oldText/newText/edits/replaceAll) ` +
|
||||
`but mode="line". ` +
|
||||
`Remove \`mode: "line"\` or use operation/line parameters instead.`;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
// Handler function
|
||||
export async function handler(params: Record<string, unknown>): Promise<ToolResult<CompactEditResult>> {
|
||||
// Check for mode mismatch before validation
|
||||
const mismatchError = detectModeMismatch(params);
|
||||
if (mismatchError) {
|
||||
return { success: false, error: mismatchError };
|
||||
}
|
||||
|
||||
// Apply default mode before discriminatedUnion check (Zod doesn't apply defaults on discriminator)
|
||||
const normalizedParams = params.mode === undefined ? { ...params, mode: 'update' } : params;
|
||||
const parsed = ParamsSchema.safeParse(normalizedParams);
|
||||
|
||||
@@ -209,9 +209,9 @@ class DeepWikiStore:
|
||||
attempts INTEGER DEFAULT 0,
|
||||
last_tool TEXT,
|
||||
last_error TEXT,
|
||||
generated_at TEXT,
|
||||
created_at TEXT DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TEXT DEFAULT CURRENT_TIMESTAMP
|
||||
generated_at REAL,
|
||||
created_at REAL,
|
||||
updated_at REAL
|
||||
)
|
||||
"""
|
||||
)
|
||||
@@ -873,25 +873,33 @@ class DeepWikiStore:
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
stale = []
|
||||
if not files:
|
||||
return []
|
||||
|
||||
# Build lookup: normalized_path -> original file dict
|
||||
lookup: dict[str, dict[str, str]] = {}
|
||||
normalized: list[str] = []
|
||||
for f in files:
|
||||
path_str = self._normalize_path(f["path"])
|
||||
row = conn.execute(
|
||||
"SELECT content_hash FROM deepwiki_files WHERE path=?",
|
||||
(path_str,),
|
||||
).fetchone()
|
||||
if row and row["content_hash"] != f["hash"]:
|
||||
stale.append({
|
||||
"path": f["path"],
|
||||
"stored_hash": row["content_hash"],
|
||||
"current_hash": f["hash"],
|
||||
})
|
||||
elif not row:
|
||||
stale.append({
|
||||
"path": f["path"],
|
||||
"stored_hash": None,
|
||||
"current_hash": f["hash"],
|
||||
})
|
||||
lookup[path_str] = f
|
||||
normalized.append(path_str)
|
||||
|
||||
placeholders = ",".join("?" * len(normalized))
|
||||
rows = conn.execute(
|
||||
f"SELECT path, content_hash FROM deepwiki_files WHERE path IN ({placeholders})",
|
||||
normalized,
|
||||
).fetchall()
|
||||
|
||||
stored: dict[str, str] = {row["path"]: row["content_hash"] for row in rows}
|
||||
|
||||
stale = []
|
||||
for path_str, f in lookup.items():
|
||||
stored_hash = stored.get(path_str)
|
||||
if stored_hash is None:
|
||||
stale.append({"path": f["path"], "stored_hash": None, "current_hash": f["hash"]})
|
||||
elif stored_hash != f["hash"]:
|
||||
stale.append({"path": f["path"], "stored_hash": stored_hash, "current_hash": f["hash"]})
|
||||
|
||||
return stale
|
||||
|
||||
def get_symbols_for_paths(
|
||||
@@ -909,20 +917,25 @@ class DeepWikiStore:
|
||||
conn = self._get_connection()
|
||||
result: dict[str, list[DeepWikiSymbol]] = {}
|
||||
|
||||
for path in paths:
|
||||
path_str = self._normalize_path(path)
|
||||
if not paths:
|
||||
return result
|
||||
|
||||
normalized = [self._normalize_path(p) for p in paths]
|
||||
placeholders = ",".join("?" * len(normalized))
|
||||
rows = conn.execute(
|
||||
"""
|
||||
f"""
|
||||
SELECT * FROM deepwiki_symbols
|
||||
WHERE source_file=?
|
||||
ORDER BY start_line
|
||||
WHERE source_file IN ({placeholders})
|
||||
ORDER BY source_file, start_line
|
||||
""",
|
||||
(path_str,),
|
||||
normalized,
|
||||
).fetchall()
|
||||
if rows:
|
||||
result[path_str] = [
|
||||
self._row_to_deepwiki_symbol(row) for row in rows
|
||||
]
|
||||
|
||||
for row in rows:
|
||||
sf = row["source_file"]
|
||||
result.setdefault(sf, []).append(
|
||||
self._row_to_deepwiki_symbol(row)
|
||||
)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@@ -509,14 +509,17 @@ Keep it minimal. Format as clean Markdown."""
|
||||
Returns:
|
||||
True if content passes validation, False otherwise.
|
||||
"""
|
||||
import re
|
||||
|
||||
if not content or len(content.strip()) < 20:
|
||||
return False
|
||||
|
||||
required = REQUIRED_SECTIONS.get(layer, ["Description"])
|
||||
content_lower = content.lower()
|
||||
|
||||
for section in required:
|
||||
if section.lower() not in content_lower:
|
||||
# Match markdown headers (##, ###, **Bold**) or standalone section names
|
||||
# Literal regex repetition braces must be doubled inside an f-string:
# a single "{1,6}" is a replacement field evaluating the tuple (1, 6),
# producing the broken pattern "#(1, 6)\s+". "{{1,6}}" emits "{1,6}".
pattern = rf"^\s*(?:#{{1,6}}\s+|\*\*){re.escape(section)}"
|
||||
if not re.search(pattern, content, re.IGNORECASE | re.MULTILINE):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
Reference in New Issue
Block a user