chore: move 3 skills to ccw-skill-hub repository

Migrated to D:/ccw-skill-hub/skills/:
- project-analyze
- copyright-docs
- software-manual
This commit is contained in:
catlog22
2026-02-24 12:23:41 +08:00
parent 61e313a0c1
commit a859698c7d
6 changed files with 1068 additions and 0 deletions

View File

@@ -0,0 +1,163 @@
# Command: execute-fixes
> Applies fixes from fix-plan.json via code-developer subagents. Quick path = 1 agent; standard = 1 agent per group.
## When to Use
- Phase 3B of Fixer, after plan-fixes
- Requires: `${sessionFolder}/fix/fix-plan.json`, `sessionFolder`, `projectRoot`
## Strategy
**Mode**: Sequential Delegation (code-developer agents via Task)
```
quick_path=true -> 1 agent, all findings sequentially
quick_path=false -> 1 agent per group, groups in execution_order
```
## Execution Steps
### Step 1: Load Plan + Helpers
```javascript
// Load the plan produced by plan-fixes (Phase 3A); throws if the file is missing or malformed.
const fixPlan = JSON.parse(Read(`${sessionFolder}/fix/fix-plan.json`))
const { groups, execution_order, quick_path: isQuickPath } = fixPlan
// Shared accumulator mutated by parseAgentResults below; each finding lands in exactly one bucket.
const results = { fixed: [], failed: [], skipped: [] }
// --- Agent prompt builder ---
// Builds the full prompt for one code-developer agent: per-finding fix
// instructions (already dependency-sorted) followed by truncated contents
// of every file in the group. Relies on the tool-provided Read().
function buildAgentPrompt(findings, files) {
const fileContents = {}
// Best-effort read: files that cannot be read are silently omitted from the prompt.
for (const file of files) { try { fileContents[file] = Read(file) } catch {} }
const fDesc = findings.map((f, i) => {
const fix = f.suggested_fix || f.optimization?.approach || '(no suggestion)'
const deps = (f.fix_dependencies||[]).length ? `\nDepends on: ${f.fix_dependencies.join(', ')}` : ''
return `### ${i+1}. ${f.id} [${f.severity}]\n**File**: ${f.location?.file}:${f.location?.line}\n**Title**: ${f.title}\n**Desc**: ${f.description}\n**Strategy**: ${f.fix_strategy||'minimal'}\n**Fix**: ${fix}${deps}`
}).join('\n\n')
// Each file body is capped at 8000 chars to keep the prompt bounded.
const fContent = Object.entries(fileContents)
.filter(([,c]) => c).map(([f,c]) => `### ${f}\n\`\`\`\n${String(c).slice(0,8000)}\n\`\`\``).join('\n\n')
// NOTE(review): the literal "(unknown)" in rule 2 of the prompt below looks
// like a leaked placeholder (probably meant to be a test-file glob stem) —
// confirm the intended text before relying on this test-discovery pattern.
return `You are a code fixer agent. Apply fixes to the codebase.
## CRITICAL RULES
1. Apply each fix using Edit tool, in the order given (dependency-sorted)
2. After each fix, run related tests: tests/**/(unknown).test.* or *_test.*
3. Tests PASS -> finding is "fixed"
4. Tests FAIL -> revert: Bash("git checkout -- {file}") -> mark "failed" -> continue
5. Do NOT retry failed fixes with different strategy. Rollback and move on.
6. If a finding depends on a previously failed finding, mark "skipped"
## Findings (in order)
${fDesc}
## File Contents
${fContent}
## Required Output
After ALL findings, output JSON:
\`\`\`json
{"results":[{"id":"SEC-001","status":"fixed","file":"src/a.ts"},{"id":"COR-002","status":"failed","file":"src/b.ts","error":"reason"}]}
\`\`\`
Process each finding now. Rollback on failure, never retry.`
}
// --- Result parser ---
// Parses the agent's structured JSON report and folds each finding into the
// shared `results` accumulator (fixed/failed/skipped) from the enclosing scope.
// Falls back to a git-diff heuristic when no structured output is present.
function parseAgentResults(output, findings) {
const failedIds = new Set()
let parsed = []
try {
// Extract the first ```json fenced block from the agent's output.
const m = (output||'').match(/```json\s*\n?([\s\S]*?)\n?```/)
if (m) { const j = JSON.parse(m[1]); parsed = j.results || j || [] }
} catch {}
if (parsed.length > 0) {
for (const r of parsed) {
// Ignore reported ids that do not belong to this group's findings.
const f = findings.find(x => x.id === r.id); if (!f) continue
if (r.status === 'fixed') results.fixed.push({...f})
else if (r.status === 'failed') { results.failed.push({...f, error: r.error||'unknown'}); failedIds.add(r.id) }
else if (r.status === 'skipped') { results.skipped.push({...f, error: r.error||'dep failed'}); failedIds.add(r.id) }
}
} else {
// Fallback: check git diff per file
// A non-empty `git diff --name-only` for the finding's file is taken as
// evidence the fix landed; otherwise the finding is marked failed.
for (const f of findings) {
const file = f.location?.file
if (!file) { results.skipped.push({...f, error:'no file'}); continue }
const diff = Bash(`git diff --name-only -- "${file}" 2>/dev/null`).trim()
if (diff) results.fixed.push({...f})
else { results.failed.push({...f, error:'no changes detected'}); failedIds.add(f.id) }
}
}
// Catch unprocessed findings
// Anything never mentioned by the agent is marked failed — unless one of its
// dependencies already failed, in which case it is skipped instead.
const done = new Set([...results.fixed,...results.failed,...results.skipped].map(x=>x.id))
for (const f of findings) {
if (done.has(f.id)) continue
if ((f.fix_dependencies||[]).some(d => failedIds.has(d)))
results.skipped.push({...f, error:'dependency failed'})
else results.failed.push({...f, error:'not processed'})
}
}
```
### Step 2: Execute
```javascript
// Quick path: one agent applies every finding in a single sequential pass.
if (isQuickPath) {
// Single agent for all findings
const group = groups[0]
const prompt = buildAgentPrompt(group.findings, group.files)
const out = Task({ subagent_type:"code-developer", prompt, run_in_background:false })
parseAgentResults(out, group.findings)
} else {
// One agent per group in execution_order
const completedGroups = new Set()
// Build group dependency map
// NOTE(review): groupDeps is computed here but never consulted below —
// sequencing is driven solely by execution_order. Confirm whether this
// map is dead code or intended for a future scheduling check.
const groupDeps = {}
for (const g of groups) {
groupDeps[g.id] = new Set()
for (const f of g.findings) {
for (const depId of (f.fix_dependencies||[])) {
// Lift each cross-group finding dependency to a group-level edge.
const dg = groups.find(x => x.findings.some(fx => fx.id === depId))
if (dg && dg.id !== g.id) groupDeps[g.id].add(dg.id)
}
}
}
for (const gid of execution_order) {
// Ids in execution_order with no matching group are silently skipped.
const group = groups.find(g => g.id === gid)
if (!group) continue
const prompt = buildAgentPrompt(group.findings, group.files)
const out = Task({ subagent_type:"code-developer", prompt, run_in_background:false })
parseAgentResults(out, group.findings)
completedGroups.add(gid)
// Checkpoint after every group so progress survives a crash.
Write(`${sessionFolder}/fix/fix-progress.json`, JSON.stringify({
completed_groups:[...completedGroups],
results_so_far:{fixed:results.fixed.length, failed:results.failed.length}
}, null, 2))
mcp__ccw-tools__team_msg({ operation:"log", team:"team-review", from:"fixer",
to:"coordinator", type:"fix_progress",
summary:`[fixer] Group ${gid}: ${results.fixed.length} fixed, ${results.failed.length} failed` })
}
}
```
### Step 3: Write Results
```javascript
// Persist the final per-finding outcomes (fixed/failed/skipped) for reporting.
Write(`${sessionFolder}/fix/execution-results.json`, JSON.stringify(results, null, 2))
```
## Error Handling
| Scenario | Resolution |
|----------|------------|
| Agent crashes | Mark group findings as failed, continue next group |
| Test failure after fix | Rollback (`git checkout -- {file}`), mark failed, continue |
| No structured output | Fallback to git diff detection |
| Dependency failed | Skip dependent findings automatically |
| fix-plan.json missing | Report error, write empty results |

View File

@@ -0,0 +1,187 @@
# Command: plan-fixes
> Deterministic grouping algorithm. Groups findings by file, merges dependent groups, topological sorts within groups, writes fix-plan.json.
## When to Use
- Phase 3A of Fixer, after context resolution
- Requires: `fixableFindings[]`, `sessionFolder`, `quickPath` from Phase 2
**Trigger conditions**:
- FIX-* task in Phase 3 with at least 1 fixable finding
## Strategy
**Mode**: Direct (inline execution, deterministic algorithm, no CLI needed)
## Execution Steps
### Step 1: Group Findings by Primary File
```javascript
// Bucket findings by their primary file; findings with no location fall into '_unknown'.
const fileGroups = {}
for (const f of fixableFindings) {
const file = f.location?.file || '_unknown'
if (!fileGroups[file]) fileGroups[file] = []
fileGroups[file].push(f)
}
```
### Step 2: Merge Groups with Cross-File Dependencies
```javascript
// Build adjacency: if finding A (group X) depends on finding B (group Y), merge X into Y
// Map each finding id to its primary file so finding-level dependency edges
// can be translated into file-to-file merge decisions.
const findingFileMap = {}
for (const f of fixableFindings) {
findingFileMap[f.id] = f.location?.file || '_unknown'
}
// Union-Find for group merging
// find() performs recursive path compression; union() links the two roots.
const parent = {}
const find = (x) => parent[x] === x ? x : (parent[x] = find(parent[x]))
const union = (a, b) => { parent[find(a)] = find(b) }
const allFiles = Object.keys(fileGroups)
for (const file of allFiles) parent[file] = file
for (const f of fixableFindings) {
const myFile = f.location?.file || '_unknown'
for (const depId of (f.fix_dependencies || [])) {
const depFile = findingFileMap[depId]
// Dependencies on unknown finding ids yield depFile === undefined and are ignored.
if (depFile && depFile !== myFile) {
union(myFile, depFile)
}
}
}
// Collect merged groups
// All files sharing a union-find root collapse into one group keyed by that root.
const mergedGroupMap = {}
for (const file of allFiles) {
const root = find(file)
if (!mergedGroupMap[root]) mergedGroupMap[root] = { files: [], findings: [] }
mergedGroupMap[root].files.push(file)
mergedGroupMap[root].findings.push(...fileGroups[file])
}
// Deduplicate files
for (const g of Object.values(mergedGroupMap)) {
g.files = [...new Set(g.files)]
}
```
### Step 3: Topological Sort Within Each Group
```javascript
// Orders a group's findings so every finding appears after the findings it
// depends on (Kahn's algorithm). Dependency edges pointing outside the group
// are ignored; members of a dependency cycle are not dropped — they are
// appended at the end in their original input order.
function topoSort(findings) {
  const byId = new Map(findings.map((f) => [f.id, f]))
  const remaining = new Map()   // id -> count of unmet in-group dependencies
  const dependents = new Map()  // id -> ids that depend on this one
  for (const { id } of findings) {
    remaining.set(id, 0)
    dependents.set(id, [])
  }
  for (const f of findings) {
    for (const dep of f.fix_dependencies || []) {
      if (!byId.has(dep)) continue  // unknown dependency: drop the edge
      dependents.get(dep).push(f.id)
      remaining.set(f.id, remaining.get(f.id) + 1)
    }
  }
  // Seed with findings that have no in-group prerequisites, in input order.
  const ready = findings.filter((f) => remaining.get(f.id) === 0).map((f) => f.id)
  const ordered = []
  while (ready.length > 0) {
    const id = ready.shift()
    ordered.push(id)
    for (const succ of dependents.get(id)) {
      remaining.set(succ, remaining.get(succ) - 1)
      if (remaining.get(succ) === 0) ready.push(succ)
    }
  }
  // Anything not emitted belongs to a cycle; keep it, appended in input order.
  if (ordered.length < findings.length) {
    const emitted = new Set(ordered)
    for (const f of findings) {
      if (!emitted.has(f.id)) ordered.push(f.id)
    }
  }
  return ordered.map((id) => byId.get(id))
}
// Materialize one group object per union-find root: findings dependency-sorted
// via topoSort, plus the group's highest severity for later ordering.
const groups = Object.entries(mergedGroupMap).map(([root, g], i) => {
const sorted = topoSort(g.findings)
// Lower ordinal = more severe; unrecognized severities rank after 'low'.
const maxSev = sorted.reduce((max, f) => {
const ord = { critical: 0, high: 1, medium: 2, low: 3 }
return (ord[f.severity] ?? 4) < (ord[max] ?? 4) ? f.severity : max
}, 'low')
return {
id: `G${i + 1}`,
files: g.files,
findings: sorted,
max_severity: maxSev
}
})
```
### Step 4: Sort Groups by Max Severity
```javascript
// Most severe groups execute first; unknown severities sort last.
const SEV_ORDER = { critical: 0, high: 1, medium: 2, low: 3 }
groups.sort((a, b) => (SEV_ORDER[a.max_severity] ?? 4) - (SEV_ORDER[b.max_severity] ?? 4))
// Re-assign IDs after sort
groups.forEach((g, i) => { g.id = `G${i + 1}` })
const execution_order = groups.map(g => g.id)
```
### Step 5: Determine Execution Path
```javascript
// Quick path only when everything fits in one small group (<=5 findings,
// at most 1 group) — execute-fixes then uses a single agent.
const totalFindings = fixableFindings.length
const totalGroups = groups.length
const isQuickPath = totalFindings <= 5 && totalGroups <= 1
```
### Step 6: Write fix-plan.json
```javascript
// Serialize the plan with only the finding fields execute-fixes consumes.
const fixPlan = {
plan_id: `fix-plan-${Date.now()}`,
quick_path: isQuickPath,
groups: groups.map(g => ({
id: g.id,
files: g.files,
findings: g.findings.map(f => ({
id: f.id, severity: f.severity, dimension: f.dimension,
title: f.title, description: f.description,
location: f.location, suggested_fix: f.suggested_fix,
fix_strategy: f.fix_strategy, fix_complexity: f.fix_complexity,
fix_dependencies: f.fix_dependencies,
root_cause: f.root_cause, optimization: f.optimization
})),
max_severity: g.max_severity
})),
execution_order: execution_order,
total_findings: totalFindings,
total_groups: totalGroups
}
// Ensure the session fix/ directory exists before writing the plan.
Bash(`mkdir -p "${sessionFolder}/fix"`)
Write(`${sessionFolder}/fix/fix-plan.json`, JSON.stringify(fixPlan, null, 2))
// Notify the coordinator of plan size and chosen execution path.
mcp__ccw-tools__team_msg({ operation:"log", team:"team-review", from:"fixer",
to:"coordinator", type:"fix_progress",
summary:`[fixer] Fix plan: ${totalGroups} groups, ${totalFindings} findings, path=${isQuickPath ? 'quick' : 'standard'}` })
```
## Error Handling
| Scenario | Resolution |
|----------|------------|
| All findings share one file | Single group, likely quick path |
| Dependency cycle detected | Topo sort appends cycle members at end |
| Finding references unknown dependency | Ignore that dependency edge |
| Empty fixableFindings | Should not reach this command (checked in Phase 2) |