refactor: Update issue queue structure and commands

- Renamed the queue item array from 'queue' to 'tasks' across agents, commands, CLI, and dashboard for clarity.
- Updated CLI commands to use task item IDs (item_id, e.g. T-1) instead of queue IDs (queue_id, e.g. Q-001).
- Added queue management support for deleting historical queues (ccw issue queue delete).
- Improved metadata handling and task execution tracking.
- Updated dashboard and issue manager views to accommodate new task structure.
- Bumped version to 6.3.8 in package.json and package-lock.json.
catlog22
2025-12-27 22:04:15 +08:00
parent 2e493277a1
commit b58589ddad
13 changed files with 394 additions and 336 deletions

View File

@@ -20,14 +20,7 @@ You are a specialized issue planning agent that combines exploration and plannin
```javascript
{
// Required
issues: [
{
id: string, // Issue ID (e.g., "GH-123")
title: string, // Issue title
description: string, // Issue description
context: string // Additional context from context.md
}
],
issue_ids: string[], // Issue IDs only (e.g., ["GH-123", "GH-124"])
project_root: string, // Project root path for ACE search
// Optional
@@ -36,6 +29,8 @@ You are a specialized issue planning agent that combines exploration and plannin
}
```
**Note**: Agent receives IDs only. Use `ccw issue status <id> --json` to fetch full details.
## Schema-Driven Output
**CRITICAL**: Read the solution schema first to determine output structure:
@@ -65,6 +60,31 @@ Phase 4: Validation & Output (15%)
## Phase 1: Issue Understanding
### Step 1: Fetch Issue Details via CLI
For each issue ID received, fetch full details:
```bash
ccw issue status <issue-id> --json
```
Returns:
```json
{
"issue": {
"id": "GH-123",
"title": "Add authentication",
"context": "...",
"affected_components": ["auth", "api"],
"lifecycle_requirements": { "test_strategy": "unit", "regression_scope": "affected" }
},
"solutions": [],
"bound": null
}
```
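A minimal sketch (issue ID hypothetical, `Bash()` per this document's conventions) of consuming that output before analysis:
```javascript
// Fetch and parse the status payload, then pull the fields used in Step 2.
const status = JSON.parse(Bash(`ccw issue status GH-123 --json`));
const { title, context, affected_components, lifecycle_requirements } = status.issue;
```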
### Step 2: Analyze Issue
**Extract from each issue**:
- Title and description analysis
- Key requirements and constraints
@@ -661,6 +681,23 @@ function generateOutput(solutions, conflicts) {
}
```
### Solution Registration via CLI
**IMPORTANT**: Register solutions using the CLI instead of direct file writes:
```bash
# 1. Write solution JSON to temp file
echo '<solution-json>' > /tmp/sol-{issue-id}.json
# 2. Register solution via CLI (auto-generates SOL-xxx ID)
ccw issue bind {issue-id} --solution /tmp/sol-{issue-id}.json
```
**CLI Output**: Returns registered solution ID for summary:
```
✓ Solution SOL-20251227-001 registered (5 tasks)
```
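A minimal sketch of that flow, assuming the agent has already built one solution object per issue (`solutionsByIssue` is a hypothetical name; `Write()`/`Bash()` follow this document's conventions):
```javascript
// Hypothetical registration loop: write each solution to a temp file, then bind it.
for (const [issueId, solution] of Object.entries(solutionsByIssue)) {
  const tmpPath = `/tmp/sol-${issueId}.json`;
  Write(tmpPath, JSON.stringify(solution, null, 2));
  // The CLI assigns the SOL-xxx ID and prints it for the summary
  Bash(`ccw issue bind ${issueId} --solution ${tmpPath}`);
}
```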
### Solution Schema (Closed-Loop Tasks)
Each task MUST include ALL 5 lifecycle phases:

View File

@@ -500,35 +500,35 @@ function canRunParallel(taskKey, groupTasks, taskGraph, conflicts) {
```javascript
function generateQueueItems(orderedTasks, taskGraph, conflicts) {
const queueItems = []
let queueIdCounter = 1
let itemIdCounter = 1
for (const key of orderedTasks) {
const node = taskGraph.get(key)
queueItems.push({
queue_id: `Q-${String(queueIdCounter++).padStart(3, '0')}`,
item_id: `T-${itemIdCounter++}`,
issue_id: node.issue_id,
solution_id: node.solution_id,
task_id: node.task.id,
status: 'pending',
execution_order: node.execution_order,
execution_group: node.execution_group,
depends_on: mapDependenciesToQueueIds(node, queueItems),
depends_on: mapDependenciesToItemIds(node, queueItems),
semantic_priority: node.semantic_priority,
queued_at: new Date().toISOString()
assigned_executor: node.task.executor || 'codex'
})
}
return queueItems
}
function mapDependenciesToQueueIds(node, queueItems) {
function mapDependenciesToItemIds(node, queueItems) {
return (node.task.depends_on || []).map(dep => {
const depKey = `${node.issue_id}:${dep}`
const queueItem = queueItems.find(q =>
q.issue_id === node.issue_id && q.task_id === dep
)
return queueItem?.queue_id || dep
return queueItem?.item_id || dep
})
}
```
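For illustration, a standalone sketch of the dependency mapping above (IDs hypothetical): a task-level dependency such as `T1` resolves to the queue item ID assigned earlier in the same pass, and falls back to the raw task ID when unresolved.
```javascript
const queueItems = [
  { item_id: 'T-1', issue_id: 'GH-123', task_id: 'T1', status: 'pending' }
];
const node = { issue_id: 'GH-123', task: { id: 'T2', depends_on: ['T1'] } };

const depends_on = (node.task.depends_on || []).map(dep => {
  const queueItem = queueItems.find(q =>
    q.issue_id === node.issue_id && q.task_id === dep
  );
  return queueItem?.item_id || dep; // unresolved deps keep the raw task ID
});
// depends_on === ['T-1']
```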
@@ -538,7 +538,7 @@ function mapDependenciesToQueueIds(node, queueItems) {
```javascript
function generateOutput(queueItems, conflicts, groups) {
return {
queue: queueItems,
tasks: queueItems,
conflicts: conflicts.map(c => ({
type: c.type,
file: c.file,
@@ -652,10 +652,10 @@ function validateOrdering(queueItems, taskGraph) {
const node = taskGraph.get(key)
// Check dependencies come before
for (const depQueueId of item.depends_on) {
const depItem = queueItems.find(q => q.queue_id === depQueueId)
for (const depItemId of item.depends_on) {
const depItem = queueItems.find(q => q.item_id === depItemId)
if (depItem && depItem.execution_order >= item.execution_order) {
errors.push(`${item.queue_id} ordered before dependency ${depQueueId}`)
errors.push(`${item.item_id} ordered before dependency ${depItemId}`)
}
}
}
@@ -690,7 +690,7 @@ function validateOrdering(queueItems, taskGraph) {
5. Calculate semantic priority for all tasks
6. Validate ordering before output
7. Include rationale for conflict resolutions
8. Map depends_on to queue_ids in output
8. Map depends_on to item_ids in output
**NEVER**:
1. Execute tasks (ordering only)

View File

@@ -17,12 +17,14 @@ Execution orchestrator that coordinates codex instances. Each task is executed b
- No file reading in codex
- Orchestrator manages parallelism
## Storage Structure (Flat JSONL)
## Storage Structure (Queue History)
```
.workflow/issues/
├── issues.jsonl # All issues (one per line)
├── queue.json # Execution queue
├── queues/ # Queue history directory
│ ├── index.json # Queue index (active + history)
│ └── {queue-id}.json # Individual queue files
└── solutions/
├── {issue-id}.jsonl # Solutions for issue
└── ...
@@ -78,19 +80,19 @@ Phase 4: Completion
### Phase 1: Queue Loading
```javascript
// Load queue
const queuePath = '.workflow/issues/queue.json';
if (!Bash(`test -f "${queuePath}" && echo exists`).includes('exists')) {
console.log('No queue found. Run /issue:queue first.');
// Load active queue via CLI endpoint
const queueJson = Bash(`ccw issue status --json 2>/dev/null || echo '{}'`);
const queue = JSON.parse(queueJson);
if (!queue.id || queue.tasks?.length === 0) {
console.log('No active queue found. Run /issue:queue first.');
return;
}
const queue = JSON.parse(Read(queuePath));
// Count by status
const pending = queue.queue.filter(q => q.status === 'pending');
const executing = queue.queue.filter(q => q.status === 'executing');
const completed = queue.queue.filter(q => q.status === 'completed');
const pending = queue.tasks.filter(q => q.status === 'pending');
const executing = queue.tasks.filter(q => q.status === 'executing');
const completed = queue.tasks.filter(q => q.status === 'completed');
console.log(`
## Execution Queue Status
@@ -98,7 +100,7 @@ console.log(`
- Pending: ${pending.length}
- Executing: ${executing.length}
- Completed: ${completed.length}
- Total: ${queue.queue.length}
- Total: ${queue.tasks.length}
`);
if (pending.length === 0 && executing.length === 0) {
@@ -113,10 +115,10 @@ if (pending.length === 0 && executing.length === 0) {
// Find ready tasks (dependencies satisfied)
function getReadyTasks() {
const completedIds = new Set(
queue.queue.filter(q => q.status === 'completed').map(q => q.queue_id)
queue.tasks.filter(q => q.status === 'completed').map(q => q.item_id)
);
return queue.queue.filter(item => {
return queue.tasks.filter(item => {
if (item.status !== 'pending') return false;
return item.depends_on.every(depId => completedIds.has(depId));
});
@@ -141,9 +143,9 @@ readyTasks.sort((a, b) => a.execution_order - b.execution_order);
// Initialize TodoWrite
TodoWrite({
todos: readyTasks.slice(0, parallelLimit).map(t => ({
content: `[${t.queue_id}] ${t.issue_id}:${t.task_id}`,
content: `[${t.item_id}] ${t.issue_id}:${t.task_id}`,
status: 'pending',
activeForm: `Executing ${t.queue_id}`
activeForm: `Executing ${t.item_id}`
}))
});
```
@@ -207,7 +209,7 @@ This returns JSON with full lifecycle definition:
### Step 3: Report Completion
When ALL phases complete successfully:
\`\`\`bash
ccw issue complete <queue_id> --result '{
ccw issue complete <item_id> --result '{
"files_modified": ["path1", "path2"],
"tests_passed": true,
"regression_passed": true,
@@ -220,7 +222,7 @@ ccw issue complete <queue_id> --result '{
If any phase fails and cannot be fixed:
\`\`\`bash
ccw issue fail <queue_id> --reason "Phase X failed: <details>"
ccw issue fail <item_id> --reason "Phase X failed: <details>"
\`\`\`
### Rules
@@ -239,12 +241,12 @@ Begin by running: ccw issue next
if (executor === 'codex') {
Bash(
`ccw cli -p "${escapePrompt(codexPrompt)}" --tool codex --mode write --id exec-${queueItem.queue_id}`,
`ccw cli -p "${escapePrompt(codexPrompt)}" --tool codex --mode write --id exec-${queueItem.item_id}`,
timeout=3600000 // 1 hour timeout
);
} else if (executor === 'gemini') {
Bash(
`ccw cli -p "${escapePrompt(codexPrompt)}" --tool gemini --mode write --id exec-${queueItem.queue_id}`,
`ccw cli -p "${escapePrompt(codexPrompt)}" --tool gemini --mode write --id exec-${queueItem.item_id}`,
timeout=1800000 // 30 min timeout
);
} else {
@@ -252,7 +254,7 @@ Begin by running: ccw issue next
Task(
subagent_type="code-developer",
run_in_background=false,
description=`Execute ${queueItem.queue_id}`,
description=`Execute ${queueItem.item_id}`,
prompt=codexPrompt
);
}
@@ -265,23 +267,23 @@ for (let i = 0; i < readyTasks.length; i += parallelLimit) {
const batch = readyTasks.slice(i, i + parallelLimit);
console.log(`\n### Executing Batch ${Math.floor(i / parallelLimit) + 1}`);
console.log(batch.map(t => `- ${t.queue_id}: ${t.issue_id}:${t.task_id}`).join('\n'));
console.log(batch.map(t => `- ${t.item_id}: ${t.issue_id}:${t.task_id}`).join('\n'));
if (parallelLimit === 1) {
// Sequential execution
for (const task of batch) {
updateTodo(task.queue_id, 'in_progress');
updateTodo(task.item_id, 'in_progress');
await executeTask(task);
updateTodo(task.queue_id, 'completed');
updateTodo(task.item_id, 'completed');
}
} else {
// Parallel execution - launch all at once
const executions = batch.map(task => {
updateTodo(task.queue_id, 'in_progress');
updateTodo(task.item_id, 'in_progress');
return executeTask(task);
});
await Promise.all(executions);
batch.forEach(task => updateTodo(task.queue_id, 'completed'));
batch.forEach(task => updateTodo(task.item_id, 'completed'));
}
// Refresh ready tasks after batch
@@ -298,7 +300,7 @@ When codex calls `ccw issue next`, it receives:
```json
{
"queue_id": "Q-001",
"item_id": "T-1",
"issue_id": "GH-123",
"solution_id": "SOL-001",
"task": {
@@ -336,60 +338,38 @@ When codex calls `ccw issue next`, it receives:
### Phase 4: Completion Summary
```javascript
// Reload queue for final status
const finalQueue = JSON.parse(Read(queuePath));
// Reload queue for final status via CLI
const finalQueueJson = Bash(`ccw issue status --json 2>/dev/null || echo '{}'`);
const finalQueue = JSON.parse(finalQueueJson);
const summary = {
completed: finalQueue.queue.filter(q => q.status === 'completed').length,
failed: finalQueue.queue.filter(q => q.status === 'failed').length,
pending: finalQueue.queue.filter(q => q.status === 'pending').length,
total: finalQueue.queue.length
// Use queue._metadata for summary (already calculated by CLI)
const summary = finalQueue._metadata || {
completed_count: 0,
failed_count: 0,
pending_count: 0,
total_tasks: 0
};
console.log(`
## Execution Complete
**Completed**: ${summary.completed}/${summary.total}
**Failed**: ${summary.failed}
**Pending**: ${summary.pending}
**Completed**: ${summary.completed_count}/${summary.total_tasks}
**Failed**: ${summary.failed_count}
**Pending**: ${summary.pending_count}
### Task Results
${finalQueue.queue.map(q => {
${(finalQueue.tasks || []).map(q => {
const icon = q.status === 'completed' ? '✓' :
q.status === 'failed' ? '✗' :
q.status === 'executing' ? '⟳' : '○';
return `${icon} ${q.queue_id} [${q.issue_id}:${q.task_id}] - ${q.status}`;
return `${icon} ${q.item_id} [${q.issue_id}:${q.task_id}] - ${q.status}`;
}).join('\n')}
`);
// Update issue statuses in issues.jsonl
const issuesPath = '.workflow/issues/issues.jsonl';
const allIssues = Bash(`cat "${issuesPath}"`)
.split('\n')
.filter(line => line.trim())
.map(line => JSON.parse(line));
// Issue status updates are handled by ccw issue complete/fail endpoints
// No need to manually update issues.jsonl here
const issueIds = [...new Set(finalQueue.queue.map(q => q.issue_id))];
for (const issueId of issueIds) {
const issueTasks = finalQueue.queue.filter(q => q.issue_id === issueId);
if (issueTasks.every(q => q.status === 'completed')) {
console.log(`\n✓ Issue ${issueId} fully completed!`);
// Update issue status
const issueIndex = allIssues.findIndex(i => i.id === issueId);
if (issueIndex !== -1) {
allIssues[issueIndex].status = 'completed';
allIssues[issueIndex].completed_at = new Date().toISOString();
allIssues[issueIndex].updated_at = new Date().toISOString();
}
}
}
// Write updated issues.jsonl
Write(issuesPath, allIssues.map(i => JSON.stringify(i)).join('\n'));
if (summary.pending > 0) {
if (summary.pending_count > 0) {
console.log(`
### Continue Execution
Run \`/issue:execute\` again to execute remaining tasks.
@@ -405,7 +385,7 @@ if (flags.dryRun) {
## Dry Run - Would Execute
${readyTasks.map((t, i) => `
${i + 1}. ${t.queue_id}
${i + 1}. ${t.item_id}
Issue: ${t.issue_id}
Task: ${t.task_id}
Executor: ${t.assigned_executor}
@@ -426,7 +406,32 @@ No changes made. Remove --dry-run to execute.
| No ready tasks | Check dependencies, show blocked tasks |
| Codex timeout | Mark as failed, allow retry |
| ccw issue next empty | All tasks done or blocked |
| Task execution failure | Marked via ccw issue fail |
| Task execution failure | Marked via ccw issue fail, use `ccw issue retry` to reset |
## Troubleshooting
### Interrupted Tasks
If execution was interrupted (crashed/stopped), `ccw issue next` will automatically resume:
```bash
# Automatically returns the executing task for resumption
ccw issue next
```
Tasks in `executing` status are prioritized and returned first; no manual reset is needed.
### Failed Tasks
If a task failed and you want to retry:
```bash
# Reset all failed tasks to pending
ccw issue retry
# Reset failed tasks for specific issue
ccw issue retry <issue-id>
```
## Endpoint Contract
@@ -435,16 +440,20 @@ No changes made. Remove --dry-run to execute.
- Marks task as 'executing'
- Returns `{ status: 'empty' }` when no tasks
### `ccw issue complete <queue-id>`
### `ccw issue complete <item-id>`
- Marks task as 'completed'
- Updates the active queue file
- Checks if issue is fully complete
### `ccw issue fail <queue-id>`
### `ccw issue fail <item-id>`
- Marks task as 'failed'
- Records failure reason
- Allows retry via /issue:execute
### `ccw issue retry [issue-id]`
- Resets failed tasks to 'pending'
- Allows re-execution via `ccw issue next`
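Taken together, a minimal sketch (not the command implementation; `executeTask` is hypothetical, `Bash()` follows this document's conventions) of how an orchestrator drives these endpoints:
```javascript
while (true) {
  const next = JSON.parse(Bash('ccw issue next'));   // marks the returned task 'executing'
  if (next.status === 'empty') break;                // nothing ready, or all tasks done
  try {
    executeTask(next);                               // hypothetical codex/gemini/agent dispatch
    Bash(`ccw issue complete ${next.item_id} --result '{"tests_passed": true}'`);
  } catch (err) {
    Bash(`ccw issue fail ${next.item_id} --reason "Execution failed: ${err.message}"`);
  }
}
```
After the loop, `ccw issue retry` can reset any failed items to pending before a re-run.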
## Related Commands
- `/issue:plan` - Plan issues with solutions

View File

@@ -30,7 +30,7 @@ ccw issue task <id> --title "..." # Add task
ccw issue queue # List queue
ccw issue queue add <id> # Add to queue
ccw issue next # Get next task
ccw issue done <queue-id> # Complete task
ccw issue complete <item-id> # Complete task
```
## Usage
@@ -561,7 +561,7 @@ async function deleteIssueInteractive(issueId) {
const queuePath = '.workflow/issues/queue.json';
if (Bash(`test -f "${queuePath}" && echo exists`) === 'exists') {
const queue = JSON.parse(Bash(`cat "${queuePath}"`));
queue.queue = queue.queue.filter(q => q.issue_id !== issueId);
queue.tasks = queue.tasks.filter(q => q.issue_id !== issueId);
Write(queuePath, JSON.stringify(queue, null, 2));
}

View File

@@ -75,26 +75,16 @@ Phase 4: Summary
## Implementation
### Phase 1: Issue Loading
### Phase 1: Issue Loading (IDs Only)
```javascript
// Parse input and flags
const issuesPath = '.workflow/issues/issues.jsonl';
const batchSize = flags.batchSize || 3;
// Key fields for planning (avoid loading full issue data)
const PLAN_FIELDS = 'id,title,status,context,affected_components,lifecycle_requirements,priority,bound_solution_id';
let issueIds = [];
if (flags.allPending) {
// Use jq to filter pending/registered issues - extract only IDs
const pendingIds = Bash(`
cat "${issuesPath}" 2>/dev/null | \\
jq -r 'select(.status == "pending" or .status == "registered") | .id' 2>/dev/null || echo ''
`).trim();
issueIds = pendingIds ? pendingIds.split('\n').filter(Boolean) : [];
// Get pending issue IDs directly via CLI
const ids = Bash(`ccw issue list --status pending,registered --ids`).trim();
issueIds = ids ? ids.split('\n').filter(Boolean) : [];
if (issueIds.length === 0) {
console.log('No pending issues found.');
@@ -106,50 +96,27 @@ if (flags.allPending) {
issueIds = userInput.includes(',')
? userInput.split(',').map(s => s.trim())
: [userInput.trim()];
}
// Load issues using jq to extract only key fields
const issues = [];
for (const id of issueIds) {
// Use jq to find issue by ID and extract only needed fields
const issueJson = Bash(`
cat "${issuesPath}" 2>/dev/null | \\
jq -c 'select(.id == "${id}") | {${PLAN_FIELDS}}' 2>/dev/null | head -1
`).trim();
let issue;
if (issueJson) {
issue = JSON.parse(issueJson);
} else {
console.log(`Issue ${id} not found. Creating...`);
issue = {
id,
title: `Issue ${id}`,
status: 'registered',
priority: 3,
context: '',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString()
};
// Append to issues.jsonl
Bash(`echo '${JSON.stringify(issue)}' >> "${issuesPath}"`);
// Create if not exists
for (const id of issueIds) {
Bash(`ccw issue init ${id} --title "Issue ${id}" 2>/dev/null || true`);
}
issues.push(issue);
}
// Group into batches
const batches = [];
for (let i = 0; i < issues.length; i += batchSize) {
batches.push(issues.slice(i, i + batchSize));
for (let i = 0; i < issueIds.length; i += batchSize) {
batches.push(issueIds.slice(i, i + batchSize));
}
console.log(`Processing ${issues.length} issues in ${batches.length} batch(es)`);
console.log(`Processing ${issueIds.length} issues in ${batches.length} batch(es)`);
TodoWrite({
todos: batches.flatMap((batch, i) => [
{ content: `Plan batch ${i+1}`, status: 'pending', activeForm: `Planning batch ${i+1}` }
])
todos: batches.map((_, i) => ({
content: `Plan batch ${i+1}`,
status: 'pending',
activeForm: `Planning batch ${i+1}`
}))
});
```
@@ -162,36 +129,47 @@ Bash(`mkdir -p .workflow/issues/solutions`);
for (const [batchIndex, batch] of batches.entries()) {
updateTodo(`Plan batch ${batchIndex + 1}`, 'in_progress');
// Build issue prompt for agent - agent writes solutions directly
// Build issue prompt for agent - pass IDs only, agent fetches details
const issuePrompt = `
## Issues to Plan (Closed-Loop Tasks Required)
${batch.map((issue, i) => `
### Issue ${i + 1}: ${issue.id}
**Title**: ${issue.title}
**Context**: ${issue.context || 'No context provided'}
**Affected Components**: ${issue.affected_components?.join(', ') || 'Not specified'}
**Issue IDs**: ${batch.join(', ')}
**Lifecycle Requirements**:
- Test Strategy: ${issue.lifecycle_requirements?.test_strategy || 'auto'}
- Regression Scope: ${issue.lifecycle_requirements?.regression_scope || 'affected'}
- Commit Strategy: ${issue.lifecycle_requirements?.commit_strategy || 'per-task'}
`).join('\n')}
### Step 1: Fetch Issue Details
For each issue ID, use CLI to get full details:
\`\`\`bash
ccw issue status <issue-id> --json
\`\`\`
Returns:
\`\`\`json
{
"issue": { "id", "title", "context", "affected_components", "lifecycle_requirements", ... },
"solutions": [...],
"bound": null
}
\`\`\`
## Project Root
${process.cwd()}
## Output Requirements
**IMPORTANT**: Write solutions DIRECTLY to files, do NOT return full solution content.
**IMPORTANT**: Register solutions via CLI, do NOT write files directly.
### 1. Write Solution Files
For each issue, write solution to: \`.workflow/issues/solutions/{issue-id}.jsonl\`
- Append one JSON line per solution
### 1. Register Solutions via CLI
For each issue, save solution to temp file and register via CLI:
\`\`\`bash
# Write solution JSON to temp file
echo '<solution-json>' > /tmp/sol-{issue-id}.json
# Register solution via CLI (generates SOL-xxx ID automatically)
ccw issue bind {issue-id} --solution /tmp/sol-{issue-id}.json
\`\`\`
- Solution must include all closed-loop task fields (see Solution Format below)
### 2. Return Summary Only
After writing solutions, return ONLY a brief JSON summary:
After registering solutions, return ONLY a brief JSON summary:
\`\`\`json
{
"planned": [
@@ -271,31 +249,34 @@ Each task MUST include ALL lifecycle phases:
// Collect issues needing user selection (multiple solutions)
const needSelection = [];
for (const issue of issues) {
const solPath = `.workflow/issues/solutions/${issue.id}.jsonl`;
for (const issueId of issueIds) {
// Get solutions via CLI
const statusJson = Bash(`ccw issue status ${issueId} --json 2>/dev/null || echo '{}'`).trim();
const status = JSON.parse(statusJson);
const solutions = status.solutions || [];
// Use jq to count solutions
const count = parseInt(Bash(`cat "${solPath}" 2>/dev/null | jq -s 'length' 2>/dev/null || echo '0'`).trim()) || 0;
if (solutions.length === 0) continue; // No solutions - skip silently (agent already reported)
if (count === 0) continue; // No solutions - skip silently (agent already reported)
if (count === 1) {
if (solutions.length === 1) {
// Auto-bind single solution
const solId = Bash(`cat "${solPath}" | jq -r '.id' | head -1`).trim();
bindSolution(issue.id, solId);
bindSolution(issueId, solutions[0].id);
} else {
// Multiple solutions - collect for batch selection
const options = Bash(`cat "${solPath}" | jq -c '{id, description, task_count: (.tasks | length)}'`).trim();
needSelection.push({ issue, options: options.split('\n').map(s => JSON.parse(s)) });
const options = solutions.map(s => ({
id: s.id,
description: s.description,
task_count: (s.tasks || []).length
}));
needSelection.push({ issueId, options });
}
}
// Batch ask user for multiple-solution issues
if (needSelection.length > 0) {
const answer = AskUserQuestion({
questions: needSelection.map(({ issue, options }) => ({
question: `Select solution for ${issue.id}:`,
header: issue.id,
questions: needSelection.map(({ issueId, options }) => ({
question: `Select solution for ${issueId}:`,
header: issueId,
multiSelect: false,
options: options.map(s => ({
label: `${s.id} (${s.task_count} tasks)`,
@@ -305,47 +286,27 @@ if (needSelection.length > 0) {
});
// Bind selected solutions
for (const { issue } of needSelection) {
const selectedSolId = extractSelectedSolutionId(answer, issue.id);
if (selectedSolId) bindSolution(issue.id, selectedSolId);
for (const { issueId } of needSelection) {
const selectedSolId = extractSelectedSolutionId(answer, issueId);
if (selectedSolId) bindSolution(issueId, selectedSolId);
}
}
// Helper: bind solution to issue
// Helper: bind solution to issue (using CLI for safety)
function bindSolution(issueId, solutionId) {
const now = new Date().toISOString();
const solPath = `.workflow/issues/solutions/${issueId}.jsonl`;
// Update issue status
Bash(`
tmpfile=$(mktemp) && \\
cat "${issuesPath}" | jq -c 'if .id == "${issueId}" then . + {
bound_solution_id: "${solutionId}", status: "planned",
planned_at: "${now}", updated_at: "${now}"
} else . end' > "$tmpfile" && mv "$tmpfile" "${issuesPath}"
`);
// Mark solution as bound
Bash(`
tmpfile=$(mktemp) && \\
cat "${solPath}" | jq -c 'if .id == "${solutionId}" then . + {
is_bound: true, bound_at: "${now}"
} else . + {is_bound: false} end' > "$tmpfile" && mv "$tmpfile" "${solPath}"
`);
Bash(`ccw issue bind ${issueId} ${solutionId}`);
}
```
### Phase 4: Summary
```javascript
// Brief summary using jq
const stats = Bash(`
cat "${issuesPath}" 2>/dev/null | \\
jq -s '[.[] | select(.status == "planned")] | length' 2>/dev/null || echo '0'
`).trim();
// Count planned issues via CLI
const plannedIds = Bash(`ccw issue list --status planned --ids`).trim();
const plannedCount = plannedIds ? plannedIds.split('\n').length : 0;
console.log(`
## Done: ${issues.length} issues → ${stats} planned
## Done: ${issueIds.length} issues → ${plannedCount} planned
Next: \`/issue:queue\` → \`/issue:execute\`
`);

View File

@@ -77,10 +77,12 @@ Queue formation command using **issue-queue-agent** that analyzes all bound solu
# Flags
--issue <id> Form queue for specific issue only
--append <id> Append issue to active queue (don't create new)
--list List all queues with status
--switch <queue-id> Switch active queue
--archive Archive current queue (mark completed)
--clear <queue-id> Delete a queue from history
# CLI subcommands (ccw issue queue ...)
ccw issue queue list List all queues with status
ccw issue queue switch <queue-id> Switch active queue
ccw issue queue archive Archive current queue
ccw issue queue delete <queue-id> Delete queue from history
```
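As a quick illustration of the subcommands above (queue IDs hypothetical, `Bash()` per this document's conventions):
```javascript
Bash('ccw issue queue list');                        // show active + historical queues
Bash('ccw issue queue switch QUE-20251227-143000');  // make a historical queue active
Bash('ccw issue queue archive');                     // mark the current queue completed
Bash('ccw issue queue delete QUE-20251220-090000');  // remove a queue from history
```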
## Execution Process
@@ -234,7 +236,7 @@ Write(issuesPath, updatedIssues.map(i => JSON.stringify(i)).join('\n'));
console.log(`
## Queue Formed
**Total Tasks**: ${queueOutput.queue.length}
**Total Tasks**: ${queueOutput.tasks.length}
**Issues**: ${plannedIssues.length}
**Conflicts**: ${queueOutput.conflicts?.length || 0} (${queueOutput._metadata?.resolved_conflicts || 0} resolved)
@@ -256,14 +258,13 @@ Output `queues/{queue-id}.json`:
```json
{
"id": "QUE-20251227-143000",
"name": "Auth Feature Queue",
"status": "active",
"issue_ids": ["GH-123", "GH-124"],
"queue": [
"tasks": [
{
"queue_id": "Q-001",
"item_id": "T-1",
"issue_id": "GH-123",
"solution_id": "SOL-001",
"task_id": "T1",
@@ -271,8 +272,7 @@ Output `queues/{queue-id}.json`:
"execution_order": 1,
"execution_group": "P1",
"depends_on": [],
"semantic_priority": 0.7,
"queued_at": "2025-12-26T10:00:00Z"
"semantic_priority": 0.7
}
],
@@ -289,17 +289,16 @@ Output `queues/{queue-id}.json`:
],
"execution_groups": [
{ "id": "P1", "type": "parallel", "task_count": 3, "tasks": ["GH-123:T1", "GH-124:T1", "GH-125:T1"] },
{ "id": "S2", "type": "sequential", "task_count": 2, "tasks": ["GH-123:T2", "GH-124:T2"] }
{ "id": "P1", "type": "parallel", "task_count": 3, "tasks": ["T-1", "T-2", "T-3"] },
{ "id": "S2", "type": "sequential", "task_count": 2, "tasks": ["T-4", "T-5"] }
],
"_metadata": {
"version": "2.0",
"version": "2.1-optimized",
"total_tasks": 5,
"pending_count": 3,
"completed_count": 2,
"failed_count": 0,
"created_at": "2025-12-26T10:00:00Z",
"updated_at": "2025-12-26T11:00:00Z",
"source": "issue-queue-agent"
}

View File

@@ -21,7 +21,7 @@ WHILE task exists:
- TEST: Run task.test commands
- VERIFY: Check task.acceptance criteria
- COMMIT: Stage files, commit with task.commit.message_template
3. Report completion via ccw issue complete <queue_id>
3. Report completion via ccw issue complete <item_id>
4. Fetch next task via ccw issue next
WHEN queue empty:
@@ -37,7 +37,7 @@ ccw issue next
```
This returns JSON with the full task definition:
- `queue_id`: Unique ID for queue tracking (e.g., "Q-001")
- `item_id`: Unique task identifier in queue (e.g., "T-1")
- `issue_id`: Parent issue ID (e.g., "ISSUE-20251227-001")
- `task`: Full task definition with implementation steps
- `context`: Relevant files and patterns
@@ -51,7 +51,7 @@ Expected task structure:
```json
{
"queue_id": "Q-001",
"item_id": "T-1",
"issue_id": "ISSUE-20251227-001",
"solution_id": "SOL-001",
"task": {
@@ -159,7 +159,7 @@ git add path/to/file1.ts path/to/file2.ts ...
git commit -m "$(cat <<'EOF'
[task.commit.message_template]
Queue-ID: [queue_id]
Item-ID: [item_id]
Issue-ID: [issue_id]
Task-ID: [task.id]
EOF
@@ -180,7 +180,7 @@ EOF
After commit succeeds, report to queue system:
```bash
ccw issue complete [queue_id] --result '{
ccw issue complete [item_id] --result '{
"files_modified": ["path1", "path2"],
"tests_passed": true,
"acceptance_passed": true,
@@ -193,7 +193,7 @@ ccw issue complete [queue_id] --result '{
**If task failed and cannot be fixed:**
```bash
ccw issue fail [queue_id] --reason "Phase [X] failed: [details]"
ccw issue fail [item_id] --reason "Phase [X] failed: [details]"
```
## Step 5: Continue to Next Task
@@ -206,7 +206,7 @@ ccw issue next
**Output progress:**
```
✓ [N/M] Completed: [queue_id] - [task.title]
✓ [N/M] Completed: [item_id] - [task.title]
→ Fetching next task...
```
@@ -221,10 +221,10 @@ When `ccw issue next` returns `{ "status": "empty" }`:
**Total Tasks Executed**: N
**All Commits**:
| # | Queue ID | Task | Commit |
|---|----------|------|--------|
| 1 | Q-001 | Task title | abc123 |
| 2 | Q-002 | Task title | def456 |
| # | Item ID | Task | Commit |
|---|---------|------|--------|
| 1 | T-1 | Task title | abc123 |
| 2 | T-2 | Task title | def456 |
**Files Modified**:
- path/to/file1.ts

View File

@@ -277,6 +277,7 @@ export function run(argv: string[]): void {
.option('--priority <n>', 'Task priority (1-5)')
.option('--format <fmt>', 'Output format: json, markdown')
.option('--json', 'Output as JSON')
.option('--ids', 'List only IDs (one per line, for scripting)')
.option('--force', 'Force operation')
// New options for solution/queue management
.option('--solution <path>', 'Solution JSON file path')

View File

@@ -5,7 +5,7 @@
*/
import chalk from 'chalk';
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
import { existsSync, mkdirSync, readFileSync, writeFileSync, unlinkSync } from 'fs';
import { join, resolve } from 'path';
// Handle EPIPE errors gracefully
@@ -29,6 +29,18 @@ interface Issue {
source?: string;
source_url?: string;
labels?: string[];
// Agent workflow fields
affected_components?: string[];
lifecycle_requirements?: {
test_strategy?: 'unit' | 'integration' | 'e2e' | 'auto';
regression_scope?: 'full' | 'related' | 'affected';
commit_strategy?: 'per-task' | 'atomic' | 'squash';
};
problem_statement?: string;
expected_behavior?: string;
actual_behavior?: string;
reproduction_steps?: string[];
// Timestamps
created_at: string;
updated_at: string;
planned_at?: string;
@@ -100,17 +112,17 @@ interface Solution {
}
interface QueueItem {
queue_id: string;
item_id: string; // Task item ID in queue: T-1, T-2, ... (formerly queue_id)
issue_id: string;
solution_id: string;
task_id: string;
title?: string;
status: 'pending' | 'ready' | 'executing' | 'completed' | 'failed' | 'blocked';
execution_order: number;
execution_group: string;
depends_on: string[];
semantic_priority: number;
assigned_executor: 'codex' | 'gemini' | 'agent';
queued_at: string;
started_at?: string;
completed_at?: string;
result?: Record<string, any>;
@@ -118,11 +130,11 @@ interface QueueItem {
}
interface Queue {
id: string; // Queue unique ID: QUE-YYYYMMDD-HHMMSS
id: string; // Queue unique ID: QUE-YYYYMMDD-HHMMSS (derived from filename)
name?: string; // Optional queue name
status: 'active' | 'completed' | 'archived' | 'failed';
issue_ids: string[]; // Issues in this queue
queue: QueueItem[];
tasks: QueueItem[]; // Task items (formerly 'queue')
conflicts: any[];
execution_groups?: any[];
_metadata: {
@@ -132,13 +144,12 @@ interface Queue {
executing_count: number;
completed_count: number;
failed_count: number;
created_at: string;
updated_at: string;
};
}
interface QueueIndex {
active_queue_id: string | null;
active_item_id: string | null;
queues: {
id: string;
status: string;
@@ -162,6 +173,7 @@ interface IssueOptions {
json?: boolean;
force?: boolean;
fail?: boolean;
ids?: boolean; // List only IDs (one per line)
}
const ISSUES_DIR = '.workflow/issues';
@@ -278,7 +290,7 @@ function ensureQueuesDir(): void {
function readQueueIndex(): QueueIndex {
const path = join(getQueuesDir(), 'index.json');
if (!existsSync(path)) {
return { active_queue_id: null, queues: [] };
return { active_item_id: null, queues: [] };
}
return JSON.parse(readFileSync(path, 'utf-8'));
}
@@ -319,16 +331,15 @@ function createEmptyQueue(): Queue {
id: generateQueueFileId(),
status: 'active',
issue_ids: [],
queue: [],
tasks: [],
conflicts: [],
_metadata: {
version: '2.0',
version: '2.1',
total_tasks: 0,
pending_count: 0,
executing_count: 0,
completed_count: 0,
failed_count: 0,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString()
}
};
@@ -338,11 +349,11 @@ function writeQueue(queue: Queue): void {
ensureQueuesDir();
// Update metadata counts
queue._metadata.total_tasks = queue.queue.length;
queue._metadata.pending_count = queue.queue.filter(q => q.status === 'pending').length;
queue._metadata.executing_count = queue.queue.filter(q => q.status === 'executing').length;
queue._metadata.completed_count = queue.queue.filter(q => q.status === 'completed').length;
queue._metadata.failed_count = queue.queue.filter(q => q.status === 'failed').length;
queue._metadata.total_tasks = queue.tasks.length;
queue._metadata.pending_count = queue.tasks.filter(q => q.status === 'pending').length;
queue._metadata.executing_count = queue.tasks.filter(q => q.status === 'executing').length;
queue._metadata.completed_count = queue.tasks.filter(q => q.status === 'completed').length;
queue._metadata.failed_count = queue.tasks.filter(q => q.status === 'failed').length;
queue._metadata.updated_at = new Date().toISOString();
// Write queue file
@@ -359,7 +370,7 @@ function writeQueue(queue: Queue): void {
issue_ids: queue.issue_ids,
total_tasks: queue._metadata.total_tasks,
completed_tasks: queue._metadata.completed_count,
created_at: queue._metadata.created_at,
created_at: queue.id.replace('QUE-', '').replace(/(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/, '$1-$2-$3T$4:$5:$6Z'), // Derive from ID
completed_at: queue.status === 'completed' ? new Date().toISOString() : undefined
};
@@ -377,11 +388,11 @@ function writeQueue(queue: Queue): void {
}
function generateQueueItemId(queue: Queue): string {
const maxNum = queue.queue.reduce((max, q) => {
const match = q.queue_id.match(/^Q-(\d+)$/);
const maxNum = queue.tasks.reduce((max, q) => {
const match = q.item_id.match(/^T-(\d+)$/);
return match ? Math.max(max, parseInt(match[1])) : max;
}, 0);
return `Q-${String(maxNum + 1).padStart(3, '0')}`;
return `T-${maxNum + 1}`;
}
// ============ Commands ============
@@ -429,7 +440,19 @@ async function initAction(issueId: string | undefined, options: IssueOptions): P
async function listAction(issueId: string | undefined, options: IssueOptions): Promise<void> {
if (!issueId) {
// List all issues
const issues = readIssues();
let issues = readIssues();
// Filter by status if specified
if (options.status) {
const statuses = options.status.split(',').map(s => s.trim());
issues = issues.filter(i => statuses.includes(i.status));
}
// IDs only mode (one per line, for scripting)
if (options.ids) {
issues.forEach(i => console.log(i.id));
return;
}
if (options.json) {
console.log(JSON.stringify(issues, null, 2));
@@ -519,7 +542,8 @@ async function statusAction(issueId: string | undefined, options: IssueOptions):
const index = readQueueIndex();
if (options.json) {
console.log(JSON.stringify({ queue: queue._metadata, issues: issues.length, queues: index.queues.length }, null, 2));
// Return full queue for programmatic access
console.log(JSON.stringify(queue, null, 2));
return;
}
@@ -806,7 +830,7 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
// Archive current queue
if (subAction === 'archive') {
const queue = readActiveQueue();
if (!queue.id || queue.queue.length === 0) {
if (!queue.id || queue.tasks.length === 0) {
console.log(chalk.yellow('No active queue to archive'));
return;
}
@@ -822,6 +846,31 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
return;
}
// Delete queue from history
if ((subAction === 'clear' || subAction === 'delete') && issueId) {
const queueId = issueId; // issueId is actually queue ID here
const queuePath = join(getQueuesDir(), `${queueId}.json`);
if (!existsSync(queuePath)) {
console.error(chalk.red(`Queue "${queueId}" not found`));
process.exit(1);
}
// Remove from index
const index = readQueueIndex();
index.queues = index.queues.filter(q => q.id !== queueId);
if (index.active_queue_id === queueId) {
index.active_queue_id = null;
}
writeQueueIndex(index);
// Delete queue file
unlinkSync(queuePath);
console.log(chalk.green(`✓ Deleted queue ${queueId}`));
return;
}
// Add issue tasks to queue
if (subAction === 'add' && issueId) {
const issue = findIssue(issueId);
@@ -839,7 +888,7 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
// Get or create active queue (create new if current is completed/archived)
let queue = readActiveQueue();
const isNewQueue = queue.queue.length === 0 || queue.status !== 'active';
const isNewQueue = queue.tasks.length === 0 || queue.status !== 'active';
if (queue.status !== 'active') {
// Create new queue if current is not active
@@ -853,24 +902,23 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
let added = 0;
for (const task of solution.tasks) {
const exists = queue.queue.some(q => q.issue_id === issueId && q.task_id === task.id);
const exists = queue.tasks.some(q => q.issue_id === issueId && q.task_id === task.id);
if (exists) continue;
queue.queue.push({
queue_id: generateQueueItemId(queue),
queue.tasks.push({
item_id: generateQueueItemId(queue),
issue_id: issueId,
solution_id: solution.id,
task_id: task.id,
status: 'pending',
execution_order: queue.queue.length + 1,
execution_order: queue.tasks.length + 1,
execution_group: 'P1',
depends_on: task.depends_on.map(dep => {
const depItem = queue.queue.find(q => q.task_id === dep && q.issue_id === issueId);
return depItem?.queue_id || dep;
const depItem = queue.tasks.find(q => q.task_id === dep && q.issue_id === issueId);
return depItem?.item_id || dep;
}),
semantic_priority: 0.5,
assigned_executor: task.executor === 'auto' ? 'codex' : task.executor as any,
queued_at: new Date().toISOString()
assigned_executor: task.executor === 'auto' ? 'codex' : task.executor as any
});
added++;
}
@@ -895,7 +943,7 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
console.log(chalk.bold.cyan('\nActive Queue\n'));
if (!queue.id || queue.queue.length === 0) {
if (!queue.id || queue.tasks.length === 0) {
console.log(chalk.yellow('No active queue'));
console.log(chalk.gray('Create one: ccw issue queue add <issue-id>'));
console.log(chalk.gray('Or list history: ccw issue queue list'));
@@ -910,7 +958,7 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
console.log(chalk.gray('QueueID'.padEnd(10) + 'Issue'.padEnd(15) + 'Task'.padEnd(8) + 'Status'.padEnd(12) + 'Executor'));
console.log(chalk.gray('-'.repeat(60)));
for (const item of queue.queue) {
for (const item of queue.tasks) {
const statusColor = {
'pending': chalk.gray,
'ready': chalk.cyan,
@@ -921,7 +969,7 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
}[item.status] || chalk.white;
console.log(
item.queue_id.padEnd(10) +
item.item_id.padEnd(10) +
item.issue_id.substring(0, 13).padEnd(15) +
item.task_id.padEnd(8) +
statusColor(item.status.padEnd(12)) +
@@ -937,13 +985,13 @@ async function nextAction(options: IssueOptions): Promise<void> {
const queue = readActiveQueue();
// Priority 1: Resume executing tasks (interrupted/crashed)
const executingTasks = queue.queue.filter(item => item.status === 'executing');
const executingTasks = queue.tasks.filter(item => item.status === 'executing');
// Priority 2: Find pending tasks with satisfied dependencies
const pendingTasks = queue.queue.filter(item => {
const pendingTasks = queue.tasks.filter(item => {
if (item.status !== 'pending') return false;
return item.depends_on.every(depId => {
const dep = queue.queue.find(q => q.queue_id === depId);
const dep = queue.tasks.find(q => q.item_id === depId);
return !dep || dep.status === 'completed';
});
});
@@ -976,25 +1024,25 @@ async function nextAction(options: IssueOptions): Promise<void> {
// Only update status if not already executing (new task)
if (!isResume) {
const idx = queue.queue.findIndex(q => q.queue_id === nextItem.queue_id);
queue.queue[idx].status = 'executing';
queue.queue[idx].started_at = new Date().toISOString();
const idx = queue.tasks.findIndex(q => q.item_id === nextItem.item_id);
queue.tasks[idx].status = 'executing';
queue.tasks[idx].started_at = new Date().toISOString();
writeQueue(queue);
updateIssue(nextItem.issue_id, { status: 'executing' });
}
// Calculate queue stats for context
const stats = {
total: queue.queue.length,
completed: queue.queue.filter(q => q.status === 'completed').length,
failed: queue.queue.filter(q => q.status === 'failed').length,
total: queue.tasks.length,
completed: queue.tasks.filter(q => q.status === 'completed').length,
failed: queue.tasks.filter(q => q.status === 'failed').length,
executing: executingTasks.length,
pending: pendingTasks.length
};
const remaining = stats.pending + stats.executing;
console.log(JSON.stringify({
queue_id: nextItem.queue_id,
item_id: nextItem.item_id,
issue_id: nextItem.issue_id,
solution_id: nextItem.solution_id,
task: taskDef,
@@ -1025,7 +1073,7 @@ async function doneAction(queueId: string | undefined, options: IssueOptions): P
}
const queue = readActiveQueue();
const idx = queue.queue.findIndex(q => q.queue_id === queueId);
const idx = queue.tasks.findIndex(q => q.item_id === queueId);
if (idx === -1) {
console.error(chalk.red(`Queue item "${queueId}" not found`));
@@ -1033,22 +1081,22 @@ async function doneAction(queueId: string | undefined, options: IssueOptions): P
}
const isFail = options.fail;
queue.queue[idx].status = isFail ? 'failed' : 'completed';
queue.queue[idx].completed_at = new Date().toISOString();
queue.tasks[idx].status = isFail ? 'failed' : 'completed';
queue.tasks[idx].completed_at = new Date().toISOString();
if (isFail) {
queue.queue[idx].failure_reason = options.reason || 'Unknown failure';
queue.tasks[idx].failure_reason = options.reason || 'Unknown failure';
} else if (options.result) {
try {
queue.queue[idx].result = JSON.parse(options.result);
queue.tasks[idx].result = JSON.parse(options.result);
} catch {
console.warn(chalk.yellow('Warning: Could not parse result JSON'));
}
}
// Check if all issue tasks are complete
const issueId = queue.queue[idx].issue_id;
const issueTasks = queue.queue.filter(q => q.issue_id === issueId);
const issueId = queue.tasks[idx].issue_id;
const issueTasks = queue.tasks.filter(q => q.issue_id === issueId);
const allIssueComplete = issueTasks.every(q => q.status === 'completed');
const anyIssueFailed = issueTasks.some(q => q.status === 'failed');
@@ -1064,13 +1112,13 @@ async function doneAction(queueId: string | undefined, options: IssueOptions): P
}
// Check if entire queue is complete
const allQueueComplete = queue.queue.every(q => q.status === 'completed');
const anyQueueFailed = queue.queue.some(q => q.status === 'failed');
const allQueueComplete = queue.tasks.every(q => q.status === 'completed');
const anyQueueFailed = queue.tasks.some(q => q.status === 'failed');
if (allQueueComplete) {
queue.status = 'completed';
console.log(chalk.green(`\n✓ Queue ${queue.id} completed (all tasks done)`));
} else if (anyQueueFailed && queue.queue.every(q => q.status === 'completed' || q.status === 'failed')) {
} else if (anyQueueFailed && queue.tasks.every(q => q.status === 'completed' || q.status === 'failed')) {
queue.status = 'failed';
console.log(chalk.yellow(`\n⚠ Queue ${queue.id} has failed tasks`));
}
@@ -1079,24 +1127,20 @@ async function doneAction(queueId: string | undefined, options: IssueOptions): P
}
/**
* retry - Retry failed tasks, or reset stuck executing tasks (--force)
* retry - Reset failed tasks to pending for re-execution
*/
async function retryAction(issueId: string | undefined, options: IssueOptions): Promise<void> {
const queue = readActiveQueue();
if (!queue.id || queue.queue.length === 0) {
if (!queue.id || queue.tasks.length === 0) {
console.log(chalk.yellow('No active queue'));
return;
}
let updated = 0;
// Check for stuck executing tasks (started > 30 min ago with no completion)
const stuckThreshold = 30 * 60 * 1000; // 30 minutes
const now = Date.now();
for (const item of queue.queue) {
// Retry failed tasks
for (const item of queue.tasks) {
// Retry failed tasks only
if (item.status === 'failed') {
if (!issueId || item.issue_id === issueId) {
item.status = 'pending';
@@ -1106,23 +1150,11 @@ async function retryAction(issueId: string | undefined, options: IssueOptions):
updated++;
}
}
// Reset stuck executing tasks (optional: use --force or --reset-stuck)
else if (item.status === 'executing' && options.force) {
const startedAt = item.started_at ? new Date(item.started_at).getTime() : 0;
if (now - startedAt > stuckThreshold) {
if (!issueId || item.issue_id === issueId) {
console.log(chalk.yellow(`Resetting stuck task: ${item.queue_id} (started ${Math.round((now - startedAt) / 60000)} min ago)`));
item.status = 'pending';
item.started_at = undefined;
updated++;
}
}
}
}
if (updated === 0) {
console.log(chalk.yellow('No failed/stuck tasks to retry'));
console.log(chalk.gray('Use --force to reset stuck executing tasks (>30 min)'));
console.log(chalk.yellow('No failed tasks to retry'));
console.log(chalk.gray('Note: Interrupted (executing) tasks are auto-resumed by "ccw issue next"'));
return;
}
@@ -1203,7 +1235,8 @@ export async function issueCommand(
console.log(chalk.gray(' queue add <issue-id> Add issue to active queue (or create new)'));
console.log(chalk.gray(' queue switch <queue-id> Switch active queue'));
console.log(chalk.gray(' queue archive Archive current queue'));
console.log(chalk.gray(' retry [issue-id] [--force] Retry failed/stuck tasks'));
console.log(chalk.gray(' queue delete <queue-id> Delete queue from history'));
console.log(chalk.gray(' retry [issue-id] Retry failed tasks'));
console.log();
console.log(chalk.bold('Execution Endpoints:'));
console.log(chalk.gray(' next Get next ready task (JSON)'));
@@ -1212,6 +1245,8 @@ export async function issueCommand(
console.log();
console.log(chalk.bold('Options:'));
console.log(chalk.gray(' --title <title> Issue/task title'));
console.log(chalk.gray(' --status <status> Filter by status (comma-separated)'));
console.log(chalk.gray(' --ids List only IDs (one per line)'));
console.log(chalk.gray(' --solution <path> Solution JSON file'));
console.log(chalk.gray(' --result <json> Execution result'));
console.log(chalk.gray(' --reason <text> Failure reason'));

View File

@@ -5,7 +5,9 @@
* Storage Structure:
* .workflow/issues/
* ├── issues.jsonl # All issues (one per line)
* ├── queue.json # Execution queue
* ├── queues/ # Queue history directory
* │ ├── index.json # Queue index (active + history)
* │ └── {queue-id}.json # Individual queue files
* └── solutions/
* ├── {issue-id}.jsonl # Solutions for issue (one per line)
* └── ...
@@ -102,12 +104,12 @@ function readQueue(issuesDir: string) {
}
}
return { queue: [], conflicts: [], execution_groups: [], _metadata: { version: '1.0', total_tasks: 0 } };
return { tasks: [], conflicts: [], execution_groups: [], _metadata: { version: '1.0', total_tasks: 0 } };
}
function writeQueue(issuesDir: string, queue: any) {
if (!existsSync(issuesDir)) mkdirSync(issuesDir, { recursive: true });
queue._metadata = { ...queue._metadata, updated_at: new Date().toISOString(), total_tasks: queue.queue?.length || 0 };
queue._metadata = { ...queue._metadata, updated_at: new Date().toISOString(), total_tasks: queue.tasks?.length || 0 };
// Check if using new multi-queue structure
const queuesDir = join(issuesDir, 'queues');
@@ -123,8 +125,8 @@ function writeQueue(issuesDir: string, queue: any) {
const index = JSON.parse(readFileSync(indexPath, 'utf8'));
const queueEntry = index.queues?.find((q: any) => q.id === queue.id);
if (queueEntry) {
queueEntry.total_tasks = queue.queue?.length || 0;
queueEntry.completed_tasks = queue.queue?.filter((i: any) => i.status === 'completed').length || 0;
queueEntry.total_tasks = queue.tasks?.length || 0;
queueEntry.completed_tasks = queue.tasks?.filter((i: any) => i.status === 'completed').length || 0;
writeFileSync(indexPath, JSON.stringify(index, null, 2));
}
} catch {
@@ -151,15 +153,29 @@ function getIssueDetail(issuesDir: string, issueId: string) {
}
function enrichIssues(issues: any[], issuesDir: string) {
return issues.map(issue => ({
...issue,
solution_count: readSolutionsJsonl(issuesDir, issue.id).length
}));
return issues.map(issue => {
const solutions = readSolutionsJsonl(issuesDir, issue.id);
let taskCount = 0;
// Get task count from bound solution
if (issue.bound_solution_id) {
const boundSol = solutions.find(s => s.id === issue.bound_solution_id);
if (boundSol?.tasks) {
taskCount = boundSol.tasks.length;
}
}
return {
...issue,
solution_count: solutions.length,
task_count: taskCount
};
});
}
function groupQueueByExecutionGroup(queue: any) {
const groups: { [key: string]: any[] } = {};
for (const item of queue.queue || []) {
for (const item of queue.tasks || []) {
const groupId = item.execution_group || 'ungrouped';
if (!groups[groupId]) groups[groupId] = [];
groups[groupId].push(item);
@@ -171,7 +187,7 @@ function groupQueueByExecutionGroup(queue: any) {
id,
type: id.startsWith('P') ? 'parallel' : id.startsWith('S') ? 'sequential' : 'unknown',
task_count: items.length,
tasks: items.map(i => i.queue_id)
tasks: items.map(i => i.item_id)
})).sort((a, b) => {
const aFirst = groups[a.id]?.[0]?.execution_order || 0;
const bFirst = groups[b.id]?.[0]?.execution_order || 0;
@@ -229,20 +245,20 @@ export async function handleIssueRoutes(ctx: RouteContext): Promise<boolean> {
}
const queue = readQueue(issuesDir);
const groupItems = queue.queue.filter((item: any) => item.execution_group === groupId);
const otherItems = queue.queue.filter((item: any) => item.execution_group !== groupId);
const groupItems = queue.tasks.filter((item: any) => item.execution_group === groupId);
const otherItems = queue.tasks.filter((item: any) => item.execution_group !== groupId);
if (groupItems.length === 0) return { error: `No items in group ${groupId}` };
const groupQueueIds = new Set(groupItems.map((i: any) => i.queue_id));
if (groupQueueIds.size !== new Set(newOrder).size) {
const groupItemIds = new Set(groupItems.map((i: any) => i.item_id));
if (groupItemIds.size !== new Set(newOrder).size) {
return { error: 'newOrder must contain all group items' };
}
for (const id of newOrder) {
if (!groupQueueIds.has(id)) return { error: `Invalid queue_id: ${id}` };
if (!groupItemIds.has(id)) return { error: `Invalid item_id: ${id}` };
}
const itemMap = new Map(groupItems.map((i: any) => [i.queue_id, i]));
const itemMap = new Map(groupItems.map((i: any) => [i.item_id, i]));
const reorderedItems = newOrder.map((qid: string, idx: number) => ({ ...itemMap.get(qid), _idx: idx }));
const newQueue = [...otherItems, ...reorderedItems].sort((a, b) => {
const aGroup = parseInt(a.execution_group?.match(/\d+/)?.[0] || '999');
@@ -255,7 +271,7 @@ export async function handleIssueRoutes(ctx: RouteContext): Promise<boolean> {
});
newQueue.forEach((item, idx) => { item.execution_order = idx + 1; delete item._idx; });
queue.queue = newQueue;
queue.tasks = newQueue;
writeQueue(issuesDir, queue);
return { success: true, groupId, reordered: newOrder.length };

View File

@@ -6,7 +6,7 @@
// ========== Issue State ==========
var issueData = {
issues: [],
queue: { queue: [], conflicts: [], execution_groups: [], grouped_items: {} },
queue: { tasks: [], conflicts: [], execution_groups: [], grouped_items: {} },
selectedIssue: null,
selectedSolution: null,
selectedSolutionIssueId: null,
@@ -65,7 +65,7 @@ async function loadQueueData() {
issueData.queue = await response.json();
} catch (err) {
console.error('Failed to load queue:', err);
issueData.queue = { queue: [], conflicts: [], execution_groups: [], grouped_items: {} };
issueData.queue = { tasks: [], conflicts: [], execution_groups: [], grouped_items: {} };
}
}
@@ -360,7 +360,7 @@ function filterIssuesByStatus(status) {
// ========== Queue Section ==========
function renderQueueSection() {
const queue = issueData.queue;
const queueItems = queue.queue || [];
const queueItems = queue.tasks || [];
const metadata = queue._metadata || {};
// Check if queue is empty
@@ -530,10 +530,10 @@ function renderQueueItem(item, index, total) {
return `
<div class="queue-item ${statusColors[item.status] || ''}"
draggable="true"
data-queue-id="${item.queue_id}"
data-item-id="${item.item_id}"
data-group-id="${item.execution_group}"
onclick="openQueueItemDetail('${item.queue_id}')">
<span class="queue-item-id font-mono text-xs">${item.queue_id}</span>
onclick="openQueueItemDetail('${item.item_id}')">
<span class="queue-item-id font-mono text-xs">${item.item_id}</span>
<span class="queue-item-issue text-xs text-muted-foreground">${item.issue_id}</span>
<span class="queue-item-task text-sm">${item.task_id}</span>
<span class="queue-item-priority" style="opacity: ${item.semantic_priority || 0.5}">
@@ -586,12 +586,12 @@ function handleIssueDragStart(e) {
const item = e.target.closest('.queue-item');
if (!item) return;
issueDragState.dragging = item.dataset.queueId;
issueDragState.dragging = item.dataset.itemId;
issueDragState.groupId = item.dataset.groupId;
item.classList.add('dragging');
e.dataTransfer.effectAllowed = 'move';
e.dataTransfer.setData('text/plain', item.dataset.queueId);
e.dataTransfer.setData('text/plain', item.dataset.itemId);
}
function handleIssueDragEnd(e) {
@@ -610,7 +610,7 @@ function handleIssueDragOver(e) {
e.preventDefault();
const target = e.target.closest('.queue-item');
if (!target || target.dataset.queueId === issueDragState.dragging) return;
if (!target || target.dataset.itemId === issueDragState.dragging) return;
// Only allow drag within same group
if (target.dataset.groupId !== issueDragState.groupId) {
@@ -635,7 +635,7 @@ function handleIssueDrop(e) {
// Get new order
const items = Array.from(container.querySelectorAll('.queue-item'));
const draggedItem = items.find(i => i.dataset.queueId === issueDragState.dragging);
const draggedItem = items.find(i => i.dataset.itemId === issueDragState.dragging);
const targetIndex = items.indexOf(target);
const draggedIndex = items.indexOf(draggedItem);
@@ -649,7 +649,7 @@ function handleIssueDrop(e) {
}
// Get new order and save
const newOrder = Array.from(container.querySelectorAll('.queue-item')).map(i => i.dataset.queueId);
const newOrder = Array.from(container.querySelectorAll('.queue-item')).map(i => i.dataset.itemId);
saveQueueOrder(issueDragState.groupId, newOrder);
}
@@ -767,7 +767,7 @@ function renderIssueDetailPanel(issue) {
<div class="flex items-center justify-between">
<span class="font-mono text-sm">${task.id}</span>
<select class="task-status-select" onchange="updateTaskStatus('${issue.id}', '${task.id}', this.value)">
${['pending', 'ready', 'in_progress', 'completed', 'failed', 'paused', 'skipped'].map(s =>
${['pending', 'ready', 'executing', 'completed', 'failed', 'blocked', 'paused', 'skipped'].map(s =>
`<option value="${s}" ${task.status === s ? 'selected' : ''}>${s}</option>`
).join('')}
</select>
@@ -1145,8 +1145,8 @@ function escapeHtml(text) {
return div.innerHTML;
}
function openQueueItemDetail(queueId) {
const item = issueData.queue.queue?.find(q => q.queue_id === queueId);
function openQueueItemDetail(itemId) {
const item = issueData.queue.tasks?.find(q => q.item_id === itemId);
if (item) {
openIssueDetail(item.issue_id);
}

package-lock.json (generated)
View File

@@ -1,12 +1,12 @@
{
"name": "claude-code-workflow",
"version": "6.2.9",
"version": "6.3.8",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "claude-code-workflow",
"version": "6.2.9",
"version": "6.3.8",
"license": "MIT",
"dependencies": {
"@modelcontextprotocol/sdk": "^1.0.4",

View File

@@ -1,6 +1,6 @@
{
"name": "claude-code-workflow",
"version": "6.3.6",
"version": "6.3.8",
"description": "JSON-driven multi-agent development framework with intelligent CLI orchestration (Gemini/Qwen/Codex), context-first architecture, and automated workflow execution",
"type": "module",
"main": "ccw/src/index.js",