refactor: Update issue queue structure and commands

- Changed queue structure from 'queue' to 'tasks' in various files for clarity.
- Updated CLI commands to reflect new task ID usage instead of queue ID.
- Enhanced queue management with new delete functionality for historical queues.
- Improved metadata handling and task execution tracking.
- Updated dashboard and issue manager views to accommodate new task structure.
- Bumped version to 6.3.8 in package.json and package-lock.json.
This commit is contained in:
catlog22
2025-12-27 22:04:15 +08:00
parent 2e493277a1
commit b58589ddad
13 changed files with 394 additions and 336 deletions

View File

@@ -20,14 +20,7 @@ You are a specialized issue planning agent that combines exploration and plannin
```javascript ```javascript
{ {
// Required // Required
issues: [ issue_ids: string[], // Issue IDs only (e.g., ["GH-123", "GH-124"])
{
id: string, // Issue ID (e.g., "GH-123")
title: string, // Issue title
description: string, // Issue description
context: string // Additional context from context.md
}
],
project_root: string, // Project root path for ACE search project_root: string, // Project root path for ACE search
// Optional // Optional
@@ -36,6 +29,8 @@ You are a specialized issue planning agent that combines exploration and plannin
} }
``` ```
**Note**: Agent receives IDs only. Use `ccw issue status <id> --json` to fetch full details.
## Schema-Driven Output ## Schema-Driven Output
**CRITICAL**: Read the solution schema first to determine output structure: **CRITICAL**: Read the solution schema first to determine output structure:
@@ -65,6 +60,31 @@ Phase 4: Validation & Output (15%)
## Phase 1: Issue Understanding ## Phase 1: Issue Understanding
### Step 1: Fetch Issue Details via CLI
For each issue ID received, fetch full details:
```bash
ccw issue status <issue-id> --json
```
Returns:
```json
{
"issue": {
"id": "GH-123",
"title": "Add authentication",
"context": "...",
"affected_components": ["auth", "api"],
"lifecycle_requirements": { "test_strategy": "unit", "regression_scope": "affected" }
},
"solutions": [],
"bound": null
}
```
### Step 2: Analyze Issue
**Extract from each issue**: **Extract from each issue**:
- Title and description analysis - Title and description analysis
- Key requirements and constraints - Key requirements and constraints
@@ -661,6 +681,23 @@ function generateOutput(solutions, conflicts) {
} }
``` ```
### Solution Registration via CLI
**IMPORTANT**: Register solutions using CLI instead of direct file writes:
```bash
# 1. Write solution JSON to temp file
echo '<solution-json>' > /tmp/sol-{issue-id}.json
# 2. Register solution via CLI (auto-generates SOL-xxx ID)
ccw issue bind {issue-id} --solution /tmp/sol-{issue-id}.json
```
**CLI Output**: Returns registered solution ID for summary:
```
✓ Solution SOL-20251227-001 registered (5 tasks)
```
### Solution Schema (Closed-Loop Tasks) ### Solution Schema (Closed-Loop Tasks)
Each task MUST include ALL 5 lifecycle phases: Each task MUST include ALL 5 lifecycle phases:

View File

@@ -500,35 +500,35 @@ function canRunParallel(taskKey, groupTasks, taskGraph, conflicts) {
```javascript ```javascript
function generateQueueItems(orderedTasks, taskGraph, conflicts) { function generateQueueItems(orderedTasks, taskGraph, conflicts) {
const queueItems = [] const queueItems = []
let queueIdCounter = 1 let itemIdCounter = 1
for (const key of orderedTasks) { for (const key of orderedTasks) {
const node = taskGraph.get(key) const node = taskGraph.get(key)
queueItems.push({ queueItems.push({
queue_id: `Q-${String(queueIdCounter++).padStart(3, '0')}`, item_id: `T-${itemIdCounter++}`,
issue_id: node.issue_id, issue_id: node.issue_id,
solution_id: node.solution_id, solution_id: node.solution_id,
task_id: node.task.id, task_id: node.task.id,
status: 'pending', status: 'pending',
execution_order: node.execution_order, execution_order: node.execution_order,
execution_group: node.execution_group, execution_group: node.execution_group,
depends_on: mapDependenciesToQueueIds(node, queueItems), depends_on: mapDependenciesToItemIds(node, queueItems),
semantic_priority: node.semantic_priority, semantic_priority: node.semantic_priority,
queued_at: new Date().toISOString() assigned_executor: node.task.executor || 'codex'
}) })
} }
return queueItems return queueItems
} }
function mapDependenciesToQueueIds(node, queueItems) { function mapDependenciesToItemIds(node, queueItems) {
return (node.task.depends_on || []).map(dep => { return (node.task.depends_on || []).map(dep => {
const depKey = `${node.issue_id}:${dep}` const depKey = `${node.issue_id}:${dep}`
const queueItem = queueItems.find(q => const queueItem = queueItems.find(q =>
q.issue_id === node.issue_id && q.task_id === dep q.issue_id === node.issue_id && q.task_id === dep
) )
return queueItem?.queue_id || dep return queueItem?.item_id || dep
}) })
} }
``` ```
@@ -538,7 +538,7 @@ function mapDependenciesToQueueIds(node, queueItems) {
```javascript ```javascript
function generateOutput(queueItems, conflicts, groups) { function generateOutput(queueItems, conflicts, groups) {
return { return {
queue: queueItems, tasks: queueItems,
conflicts: conflicts.map(c => ({ conflicts: conflicts.map(c => ({
type: c.type, type: c.type,
file: c.file, file: c.file,
@@ -652,10 +652,10 @@ function validateOrdering(queueItems, taskGraph) {
const node = taskGraph.get(key) const node = taskGraph.get(key)
// Check dependencies come before // Check dependencies come before
for (const depQueueId of item.depends_on) { for (const depItemId of item.depends_on) {
const depItem = queueItems.find(q => q.queue_id === depQueueId) const depItem = queueItems.find(q => q.item_id === depItemId)
if (depItem && depItem.execution_order >= item.execution_order) { if (depItem && depItem.execution_order >= item.execution_order) {
errors.push(`${item.queue_id} ordered before dependency ${depQueueId}`) errors.push(`${item.item_id} ordered before dependency ${depItemId}`)
} }
} }
} }
@@ -690,7 +690,7 @@ function validateOrdering(queueItems, taskGraph) {
5. Calculate semantic priority for all tasks 5. Calculate semantic priority for all tasks
6. Validate ordering before output 6. Validate ordering before output
7. Include rationale for conflict resolutions 7. Include rationale for conflict resolutions
8. Map depends_on to queue_ids in output 8. Map depends_on to item_ids in output
**NEVER**: **NEVER**:
1. Execute tasks (ordering only) 1. Execute tasks (ordering only)

View File

@@ -17,12 +17,14 @@ Execution orchestrator that coordinates codex instances. Each task is executed b
- No file reading in codex - No file reading in codex
- Orchestrator manages parallelism - Orchestrator manages parallelism
## Storage Structure (Flat JSONL) ## Storage Structure (Queue History)
``` ```
.workflow/issues/ .workflow/issues/
├── issues.jsonl # All issues (one per line) ├── issues.jsonl # All issues (one per line)
├── queue.json # Execution queue ├── queues/ # Queue history directory
│ ├── index.json # Queue index (active + history)
│ └── {queue-id}.json # Individual queue files
└── solutions/ └── solutions/
├── {issue-id}.jsonl # Solutions for issue ├── {issue-id}.jsonl # Solutions for issue
└── ... └── ...
@@ -78,19 +80,19 @@ Phase 4: Completion
### Phase 1: Queue Loading ### Phase 1: Queue Loading
```javascript ```javascript
// Load queue // Load active queue via CLI endpoint
const queuePath = '.workflow/issues/queue.json'; const queueJson = Bash(`ccw issue status --json 2>/dev/null || echo '{}'`);
if (!Bash(`test -f "${queuePath}" && echo exists`).includes('exists')) { const queue = JSON.parse(queueJson);
console.log('No queue found. Run /issue:queue first.');
if (!queue.id || queue.tasks?.length === 0) {
console.log('No active queue found. Run /issue:queue first.');
return; return;
} }
const queue = JSON.parse(Read(queuePath));
// Count by status // Count by status
const pending = queue.queue.filter(q => q.status === 'pending'); const pending = queue.tasks.filter(q => q.status === 'pending');
const executing = queue.queue.filter(q => q.status === 'executing'); const executing = queue.tasks.filter(q => q.status === 'executing');
const completed = queue.queue.filter(q => q.status === 'completed'); const completed = queue.tasks.filter(q => q.status === 'completed');
console.log(` console.log(`
## Execution Queue Status ## Execution Queue Status
@@ -98,7 +100,7 @@ console.log(`
- Pending: ${pending.length} - Pending: ${pending.length}
- Executing: ${executing.length} - Executing: ${executing.length}
- Completed: ${completed.length} - Completed: ${completed.length}
- Total: ${queue.queue.length} - Total: ${queue.tasks.length}
`); `);
if (pending.length === 0 && executing.length === 0) { if (pending.length === 0 && executing.length === 0) {
@@ -113,10 +115,10 @@ if (pending.length === 0 && executing.length === 0) {
// Find ready tasks (dependencies satisfied) // Find ready tasks (dependencies satisfied)
function getReadyTasks() { function getReadyTasks() {
const completedIds = new Set( const completedIds = new Set(
queue.queue.filter(q => q.status === 'completed').map(q => q.queue_id) queue.tasks.filter(q => q.status === 'completed').map(q => q.item_id)
); );
return queue.queue.filter(item => { return queue.tasks.filter(item => {
if (item.status !== 'pending') return false; if (item.status !== 'pending') return false;
return item.depends_on.every(depId => completedIds.has(depId)); return item.depends_on.every(depId => completedIds.has(depId));
}); });
@@ -141,9 +143,9 @@ readyTasks.sort((a, b) => a.execution_order - b.execution_order);
// Initialize TodoWrite // Initialize TodoWrite
TodoWrite({ TodoWrite({
todos: readyTasks.slice(0, parallelLimit).map(t => ({ todos: readyTasks.slice(0, parallelLimit).map(t => ({
content: `[${t.queue_id}] ${t.issue_id}:${t.task_id}`, content: `[${t.item_id}] ${t.issue_id}:${t.task_id}`,
status: 'pending', status: 'pending',
activeForm: `Executing ${t.queue_id}` activeForm: `Executing ${t.item_id}`
})) }))
}); });
``` ```
@@ -207,7 +209,7 @@ This returns JSON with full lifecycle definition:
### Step 3: Report Completion ### Step 3: Report Completion
When ALL phases complete successfully: When ALL phases complete successfully:
\`\`\`bash \`\`\`bash
ccw issue complete <queue_id> --result '{ ccw issue complete <item_id> --result '{
"files_modified": ["path1", "path2"], "files_modified": ["path1", "path2"],
"tests_passed": true, "tests_passed": true,
"regression_passed": true, "regression_passed": true,
@@ -220,7 +222,7 @@ ccw issue complete <queue_id> --result '{
If any phase fails and cannot be fixed: If any phase fails and cannot be fixed:
\`\`\`bash \`\`\`bash
ccw issue fail <queue_id> --reason "Phase X failed: <details>" ccw issue fail <item_id> --reason "Phase X failed: <details>"
\`\`\` \`\`\`
### Rules ### Rules
@@ -239,12 +241,12 @@ Begin by running: ccw issue next
if (executor === 'codex') { if (executor === 'codex') {
Bash( Bash(
`ccw cli -p "${escapePrompt(codexPrompt)}" --tool codex --mode write --id exec-${queueItem.queue_id}`, `ccw cli -p "${escapePrompt(codexPrompt)}" --tool codex --mode write --id exec-${queueItem.item_id}`,
timeout=3600000 // 1 hour timeout timeout=3600000 // 1 hour timeout
); );
} else if (executor === 'gemini') { } else if (executor === 'gemini') {
Bash( Bash(
`ccw cli -p "${escapePrompt(codexPrompt)}" --tool gemini --mode write --id exec-${queueItem.queue_id}`, `ccw cli -p "${escapePrompt(codexPrompt)}" --tool gemini --mode write --id exec-${queueItem.item_id}`,
timeout=1800000 // 30 min timeout timeout=1800000 // 30 min timeout
); );
} else { } else {
@@ -252,7 +254,7 @@ Begin by running: ccw issue next
Task( Task(
subagent_type="code-developer", subagent_type="code-developer",
run_in_background=false, run_in_background=false,
description=`Execute ${queueItem.queue_id}`, description=`Execute ${queueItem.item_id}`,
prompt=codexPrompt prompt=codexPrompt
); );
} }
@@ -265,23 +267,23 @@ for (let i = 0; i < readyTasks.length; i += parallelLimit) {
const batch = readyTasks.slice(i, i + parallelLimit); const batch = readyTasks.slice(i, i + parallelLimit);
console.log(`\n### Executing Batch ${Math.floor(i / parallelLimit) + 1}`); console.log(`\n### Executing Batch ${Math.floor(i / parallelLimit) + 1}`);
console.log(batch.map(t => `- ${t.queue_id}: ${t.issue_id}:${t.task_id}`).join('\n')); console.log(batch.map(t => `- ${t.item_id}: ${t.issue_id}:${t.task_id}`).join('\n'));
if (parallelLimit === 1) { if (parallelLimit === 1) {
// Sequential execution // Sequential execution
for (const task of batch) { for (const task of batch) {
updateTodo(task.queue_id, 'in_progress'); updateTodo(task.item_id, 'in_progress');
await executeTask(task); await executeTask(task);
updateTodo(task.queue_id, 'completed'); updateTodo(task.item_id, 'completed');
} }
} else { } else {
// Parallel execution - launch all at once // Parallel execution - launch all at once
const executions = batch.map(task => { const executions = batch.map(task => {
updateTodo(task.queue_id, 'in_progress'); updateTodo(task.item_id, 'in_progress');
return executeTask(task); return executeTask(task);
}); });
await Promise.all(executions); await Promise.all(executions);
batch.forEach(task => updateTodo(task.queue_id, 'completed')); batch.forEach(task => updateTodo(task.item_id, 'completed'));
} }
// Refresh ready tasks after batch // Refresh ready tasks after batch
@@ -298,7 +300,7 @@ When codex calls `ccw issue next`, it receives:
```json ```json
{ {
"queue_id": "Q-001", "item_id": "T-1",
"issue_id": "GH-123", "issue_id": "GH-123",
"solution_id": "SOL-001", "solution_id": "SOL-001",
"task": { "task": {
@@ -336,60 +338,38 @@ When codex calls `ccw issue next`, it receives:
### Phase 4: Completion Summary ### Phase 4: Completion Summary
```javascript ```javascript
// Reload queue for final status // Reload queue for final status via CLI
const finalQueue = JSON.parse(Read(queuePath)); const finalQueueJson = Bash(`ccw issue status --json 2>/dev/null || echo '{}'`);
const finalQueue = JSON.parse(finalQueueJson);
const summary = { // Use queue._metadata for summary (already calculated by CLI)
completed: finalQueue.queue.filter(q => q.status === 'completed').length, const summary = finalQueue._metadata || {
failed: finalQueue.queue.filter(q => q.status === 'failed').length, completed_count: 0,
pending: finalQueue.queue.filter(q => q.status === 'pending').length, failed_count: 0,
total: finalQueue.queue.length pending_count: 0,
total_tasks: 0
}; };
console.log(` console.log(`
## Execution Complete ## Execution Complete
**Completed**: ${summary.completed}/${summary.total} **Completed**: ${summary.completed_count}/${summary.total_tasks}
**Failed**: ${summary.failed} **Failed**: ${summary.failed_count}
**Pending**: ${summary.pending} **Pending**: ${summary.pending_count}
### Task Results ### Task Results
${finalQueue.queue.map(q => { ${(finalQueue.tasks || []).map(q => {
const icon = q.status === 'completed' ? '✓' : const icon = q.status === 'completed' ? '✓' :
q.status === 'failed' ? '✗' : q.status === 'failed' ? '✗' :
q.status === 'executing' ? '⟳' : '○'; q.status === 'executing' ? '⟳' : '○';
return `${icon} ${q.queue_id} [${q.issue_id}:${q.task_id}] - ${q.status}`; return `${icon} ${q.item_id} [${q.issue_id}:${q.task_id}] - ${q.status}`;
}).join('\n')} }).join('\n')}
`); `);
// Update issue statuses in issues.jsonl // Issue status updates are handled by ccw issue complete/fail endpoints
const issuesPath = '.workflow/issues/issues.jsonl'; // No need to manually update issues.jsonl here
const allIssues = Bash(`cat "${issuesPath}"`)
.split('\n')
.filter(line => line.trim())
.map(line => JSON.parse(line));
const issueIds = [...new Set(finalQueue.queue.map(q => q.issue_id))]; if (summary.pending_count > 0) {
for (const issueId of issueIds) {
const issueTasks = finalQueue.queue.filter(q => q.issue_id === issueId);
if (issueTasks.every(q => q.status === 'completed')) {
console.log(`\n✓ Issue ${issueId} fully completed!`);
// Update issue status
const issueIndex = allIssues.findIndex(i => i.id === issueId);
if (issueIndex !== -1) {
allIssues[issueIndex].status = 'completed';
allIssues[issueIndex].completed_at = new Date().toISOString();
allIssues[issueIndex].updated_at = new Date().toISOString();
}
}
}
// Write updated issues.jsonl
Write(issuesPath, allIssues.map(i => JSON.stringify(i)).join('\n'));
if (summary.pending > 0) {
console.log(` console.log(`
### Continue Execution ### Continue Execution
Run \`/issue:execute\` again to execute remaining tasks. Run \`/issue:execute\` again to execute remaining tasks.
@@ -405,7 +385,7 @@ if (flags.dryRun) {
## Dry Run - Would Execute ## Dry Run - Would Execute
${readyTasks.map((t, i) => ` ${readyTasks.map((t, i) => `
${i + 1}. ${t.queue_id} ${i + 1}. ${t.item_id}
Issue: ${t.issue_id} Issue: ${t.issue_id}
Task: ${t.task_id} Task: ${t.task_id}
Executor: ${t.assigned_executor} Executor: ${t.assigned_executor}
@@ -426,7 +406,32 @@ No changes made. Remove --dry-run to execute.
| No ready tasks | Check dependencies, show blocked tasks | | No ready tasks | Check dependencies, show blocked tasks |
| Codex timeout | Mark as failed, allow retry | | Codex timeout | Mark as failed, allow retry |
| ccw issue next empty | All tasks done or blocked | | ccw issue next empty | All tasks done or blocked |
| Task execution failure | Marked via ccw issue fail | | Task execution failure | Marked via ccw issue fail, use `ccw issue retry` to reset |
## Troubleshooting
### Interrupted Tasks
If execution was interrupted (crashed/stopped), `ccw issue next` will automatically resume:
```bash
# Automatically returns the executing task for resumption
ccw issue next
```
Tasks in `executing` status are prioritized and returned first, no manual reset needed.
### Failed Tasks
If a task failed and you want to retry:
```bash
# Reset all failed tasks to pending
ccw issue retry
# Reset failed tasks for specific issue
ccw issue retry <issue-id>
```
## Endpoint Contract ## Endpoint Contract
@@ -435,16 +440,20 @@ No changes made. Remove --dry-run to execute.
- Marks task as 'executing' - Marks task as 'executing'
- Returns `{ status: 'empty' }` when no tasks - Returns `{ status: 'empty' }` when no tasks
### `ccw issue complete <queue-id>` ### `ccw issue complete <item-id>`
- Marks task as 'completed' - Marks task as 'completed'
- Updates queue.json - Updates queue.json
- Checks if issue is fully complete - Checks if issue is fully complete
### `ccw issue fail <queue-id>` ### `ccw issue fail <item-id>`
- Marks task as 'failed' - Marks task as 'failed'
- Records failure reason - Records failure reason
- Allows retry via /issue:execute - Allows retry via /issue:execute
### `ccw issue retry [issue-id]`
- Resets failed tasks to 'pending'
- Allows re-execution via `ccw issue next`
## Related Commands ## Related Commands
- `/issue:plan` - Plan issues with solutions - `/issue:plan` - Plan issues with solutions

View File

@@ -30,7 +30,7 @@ ccw issue task <id> --title "..." # Add task
ccw issue queue # List queue ccw issue queue # List queue
ccw issue queue add <id> # Add to queue ccw issue queue add <id> # Add to queue
ccw issue next # Get next task ccw issue next # Get next task
ccw issue done <queue-id> # Complete task ccw issue complete <item-id> # Complete task
``` ```
## Usage ## Usage
@@ -561,7 +561,7 @@ async function deleteIssueInteractive(issueId) {
const queuePath = '.workflow/issues/queue.json'; const queuePath = '.workflow/issues/queue.json';
if (Bash(`test -f "${queuePath}" && echo exists`) === 'exists') { if (Bash(`test -f "${queuePath}" && echo exists`) === 'exists') {
const queue = JSON.parse(Bash(`cat "${queuePath}"`)); const queue = JSON.parse(Bash(`cat "${queuePath}"`));
queue.queue = queue.queue.filter(q => q.issue_id !== issueId); queue.tasks = queue.tasks.filter(q => q.issue_id !== issueId);
Write(queuePath, JSON.stringify(queue, null, 2)); Write(queuePath, JSON.stringify(queue, null, 2));
} }

View File

@@ -75,26 +75,16 @@ Phase 4: Summary
## Implementation ## Implementation
### Phase 1: Issue Loading ### Phase 1: Issue Loading (IDs Only)
```javascript ```javascript
// Parse input and flags
const issuesPath = '.workflow/issues/issues.jsonl';
const batchSize = flags.batchSize || 3; const batchSize = flags.batchSize || 3;
// Key fields for planning (avoid loading full issue data)
const PLAN_FIELDS = 'id,title,status,context,affected_components,lifecycle_requirements,priority,bound_solution_id';
let issueIds = []; let issueIds = [];
if (flags.allPending) { if (flags.allPending) {
// Use jq to filter pending/registered issues - extract only IDs // Get pending issue IDs directly via CLI
const pendingIds = Bash(` const ids = Bash(`ccw issue list --status pending,registered --ids`).trim();
cat "${issuesPath}" 2>/dev/null | \\ issueIds = ids ? ids.split('\n').filter(Boolean) : [];
jq -r 'select(.status == "pending" or .status == "registered") | .id' 2>/dev/null || echo ''
`).trim();
issueIds = pendingIds ? pendingIds.split('\n').filter(Boolean) : [];
if (issueIds.length === 0) { if (issueIds.length === 0) {
console.log('No pending issues found.'); console.log('No pending issues found.');
@@ -106,50 +96,27 @@ if (flags.allPending) {
issueIds = userInput.includes(',') issueIds = userInput.includes(',')
? userInput.split(',').map(s => s.trim()) ? userInput.split(',').map(s => s.trim())
: [userInput.trim()]; : [userInput.trim()];
}
// Load issues using jq to extract only key fields // Create if not exists
const issues = []; for (const id of issueIds) {
for (const id of issueIds) { Bash(`ccw issue init ${id} --title "Issue ${id}" 2>/dev/null || true`);
// Use jq to find issue by ID and extract only needed fields
const issueJson = Bash(`
cat "${issuesPath}" 2>/dev/null | \\
jq -c 'select(.id == "${id}") | {${PLAN_FIELDS}}' 2>/dev/null | head -1
`).trim();
let issue;
if (issueJson) {
issue = JSON.parse(issueJson);
} else {
console.log(`Issue ${id} not found. Creating...`);
issue = {
id,
title: `Issue ${id}`,
status: 'registered',
priority: 3,
context: '',
created_at: new Date().toISOString(),
updated_at: new Date().toISOString()
};
// Append to issues.jsonl
Bash(`echo '${JSON.stringify(issue)}' >> "${issuesPath}"`);
} }
issues.push(issue);
} }
// Group into batches // Group into batches
const batches = []; const batches = [];
for (let i = 0; i < issues.length; i += batchSize) { for (let i = 0; i < issueIds.length; i += batchSize) {
batches.push(issues.slice(i, i + batchSize)); batches.push(issueIds.slice(i, i + batchSize));
} }
console.log(`Processing ${issues.length} issues in ${batches.length} batch(es)`); console.log(`Processing ${issueIds.length} issues in ${batches.length} batch(es)`);
TodoWrite({ TodoWrite({
todos: batches.flatMap((batch, i) => [ todos: batches.map((_, i) => ({
{ content: `Plan batch ${i+1}`, status: 'pending', activeForm: `Planning batch ${i+1}` } content: `Plan batch ${i+1}`,
]) status: 'pending',
activeForm: `Planning batch ${i+1}`
}))
}); });
``` ```
@@ -162,36 +129,47 @@ Bash(`mkdir -p .workflow/issues/solutions`);
for (const [batchIndex, batch] of batches.entries()) { for (const [batchIndex, batch] of batches.entries()) {
updateTodo(`Plan batch ${batchIndex + 1}`, 'in_progress'); updateTodo(`Plan batch ${batchIndex + 1}`, 'in_progress');
// Build issue prompt for agent - agent writes solutions directly // Build issue prompt for agent - pass IDs only, agent fetches details
const issuePrompt = ` const issuePrompt = `
## Issues to Plan (Closed-Loop Tasks Required) ## Issues to Plan (Closed-Loop Tasks Required)
${batch.map((issue, i) => ` **Issue IDs**: ${batch.join(', ')}
### Issue ${i + 1}: ${issue.id}
**Title**: ${issue.title}
**Context**: ${issue.context || 'No context provided'}
**Affected Components**: ${issue.affected_components?.join(', ') || 'Not specified'}
**Lifecycle Requirements**: ### Step 1: Fetch Issue Details
- Test Strategy: ${issue.lifecycle_requirements?.test_strategy || 'auto'} For each issue ID, use CLI to get full details:
- Regression Scope: ${issue.lifecycle_requirements?.regression_scope || 'affected'} \`\`\`bash
- Commit Strategy: ${issue.lifecycle_requirements?.commit_strategy || 'per-task'} ccw issue status <issue-id> --json
`).join('\n')} \`\`\`
Returns:
\`\`\`json
{
"issue": { "id", "title", "context", "affected_components", "lifecycle_requirements", ... },
"solutions": [...],
"bound": null
}
\`\`\`
## Project Root ## Project Root
${process.cwd()} ${process.cwd()}
## Output Requirements ## Output Requirements
**IMPORTANT**: Write solutions DIRECTLY to files, do NOT return full solution content. **IMPORTANT**: Register solutions via CLI, do NOT write files directly.
### 1. Write Solution Files ### 1. Register Solutions via CLI
For each issue, write solution to: \`.workflow/issues/solutions/{issue-id}.jsonl\` For each issue, save solution to temp file and register via CLI:
- Append one JSON line per solution \`\`\`bash
# Write solution JSON to temp file
echo '<solution-json>' > /tmp/sol-{issue-id}.json
# Register solution via CLI (generates SOL-xxx ID automatically)
ccw issue bind {issue-id} --solution /tmp/sol-{issue-id}.json
\`\`\`
- Solution must include all closed-loop task fields (see Solution Format below) - Solution must include all closed-loop task fields (see Solution Format below)
### 2. Return Summary Only ### 2. Return Summary Only
After writing solutions, return ONLY a brief JSON summary: After registering solutions, return ONLY a brief JSON summary:
\`\`\`json \`\`\`json
{ {
"planned": [ "planned": [
@@ -271,31 +249,34 @@ Each task MUST include ALL lifecycle phases:
// Collect issues needing user selection (multiple solutions) // Collect issues needing user selection (multiple solutions)
const needSelection = []; const needSelection = [];
for (const issue of issues) { for (const issueId of issueIds) {
const solPath = `.workflow/issues/solutions/${issue.id}.jsonl`; // Get solutions via CLI
const statusJson = Bash(`ccw issue status ${issueId} --json 2>/dev/null || echo '{}'`).trim();
const status = JSON.parse(statusJson);
const solutions = status.solutions || [];
// Use jq to count solutions if (solutions.length === 0) continue; // No solutions - skip silently (agent already reported)
const count = parseInt(Bash(`cat "${solPath}" 2>/dev/null | jq -s 'length' 2>/dev/null || echo '0'`).trim()) || 0;
if (count === 0) continue; // No solutions - skip silently (agent already reported) if (solutions.length === 1) {
if (count === 1) {
// Auto-bind single solution // Auto-bind single solution
const solId = Bash(`cat "${solPath}" | jq -r '.id' | head -1`).trim(); bindSolution(issueId, solutions[0].id);
bindSolution(issue.id, solId);
} else { } else {
// Multiple solutions - collect for batch selection // Multiple solutions - collect for batch selection
const options = Bash(`cat "${solPath}" | jq -c '{id, description, task_count: (.tasks | length)}'`).trim(); const options = solutions.map(s => ({
needSelection.push({ issue, options: options.split('\n').map(s => JSON.parse(s)) }); id: s.id,
description: s.description,
task_count: (s.tasks || []).length
}));
needSelection.push({ issueId, options });
} }
} }
// Batch ask user for multiple-solution issues // Batch ask user for multiple-solution issues
if (needSelection.length > 0) { if (needSelection.length > 0) {
const answer = AskUserQuestion({ const answer = AskUserQuestion({
questions: needSelection.map(({ issue, options }) => ({ questions: needSelection.map(({ issueId, options }) => ({
question: `Select solution for ${issue.id}:`, question: `Select solution for ${issueId}:`,
header: issue.id, header: issueId,
multiSelect: false, multiSelect: false,
options: options.map(s => ({ options: options.map(s => ({
label: `${s.id} (${s.task_count} tasks)`, label: `${s.id} (${s.task_count} tasks)`,
@@ -305,47 +286,27 @@ if (needSelection.length > 0) {
}); });
// Bind selected solutions // Bind selected solutions
for (const { issue } of needSelection) { for (const { issueId } of needSelection) {
const selectedSolId = extractSelectedSolutionId(answer, issue.id); const selectedSolId = extractSelectedSolutionId(answer, issueId);
if (selectedSolId) bindSolution(issue.id, selectedSolId); if (selectedSolId) bindSolution(issueId, selectedSolId);
} }
} }
// Helper: bind solution to issue // Helper: bind solution to issue (using CLI for safety)
function bindSolution(issueId, solutionId) { function bindSolution(issueId, solutionId) {
const now = new Date().toISOString(); Bash(`ccw issue bind ${issueId} ${solutionId}`);
const solPath = `.workflow/issues/solutions/${issueId}.jsonl`;
// Update issue status
Bash(`
tmpfile=$(mktemp) && \\
cat "${issuesPath}" | jq -c 'if .id == "${issueId}" then . + {
bound_solution_id: "${solutionId}", status: "planned",
planned_at: "${now}", updated_at: "${now}"
} else . end' > "$tmpfile" && mv "$tmpfile" "${issuesPath}"
`);
// Mark solution as bound
Bash(`
tmpfile=$(mktemp) && \\
cat "${solPath}" | jq -c 'if .id == "${solutionId}" then . + {
is_bound: true, bound_at: "${now}"
} else . + {is_bound: false} end' > "$tmpfile" && mv "$tmpfile" "${solPath}"
`);
} }
``` ```
### Phase 4: Summary ### Phase 4: Summary
```javascript ```javascript
// Brief summary using jq // Count planned issues via CLI
const stats = Bash(` const plannedIds = Bash(`ccw issue list --status planned --ids`).trim();
cat "${issuesPath}" 2>/dev/null | \\ const plannedCount = plannedIds ? plannedIds.split('\n').length : 0;
jq -s '[.[] | select(.status == "planned")] | length' 2>/dev/null || echo '0'
`).trim();
console.log(` console.log(`
## Done: ${issues.length} issues → ${stats} planned ## Done: ${issueIds.length} issues → ${plannedCount} planned
Next: \`/issue:queue\` → \`/issue:execute\` Next: \`/issue:queue\` → \`/issue:execute\`
`); `);

View File

@@ -77,10 +77,12 @@ Queue formation command using **issue-queue-agent** that analyzes all bound solu
# Flags # Flags
--issue <id> Form queue for specific issue only --issue <id> Form queue for specific issue only
--append <id> Append issue to active queue (don't create new) --append <id> Append issue to active queue (don't create new)
--list List all queues with status
--switch <queue-id> Switch active queue # CLI subcommands (ccw issue queue ...)
--archive Archive current queue (mark completed) ccw issue queue list List all queues with status
--clear <queue-id> Delete a queue from history ccw issue queue switch <queue-id> Switch active queue
ccw issue queue archive Archive current queue
ccw issue queue delete <queue-id> Delete queue from history
``` ```
## Execution Process ## Execution Process
@@ -234,7 +236,7 @@ Write(issuesPath, updatedIssues.map(i => JSON.stringify(i)).join('\n'));
console.log(` console.log(`
## Queue Formed ## Queue Formed
**Total Tasks**: ${queueOutput.queue.length} **Total Tasks**: ${queueOutput.tasks.length}
**Issues**: ${plannedIssues.length} **Issues**: ${plannedIssues.length}
**Conflicts**: ${queueOutput.conflicts?.length || 0} (${queueOutput._metadata?.resolved_conflicts || 0} resolved) **Conflicts**: ${queueOutput.conflicts?.length || 0} (${queueOutput._metadata?.resolved_conflicts || 0} resolved)
@@ -256,14 +258,13 @@ Output `queues/{queue-id}.json`:
```json ```json
{ {
"id": "QUE-20251227-143000",
"name": "Auth Feature Queue", "name": "Auth Feature Queue",
"status": "active", "status": "active",
"issue_ids": ["GH-123", "GH-124"], "issue_ids": ["GH-123", "GH-124"],
"queue": [ "tasks": [
{ {
"queue_id": "Q-001", "item_id": "T-1",
"issue_id": "GH-123", "issue_id": "GH-123",
"solution_id": "SOL-001", "solution_id": "SOL-001",
"task_id": "T1", "task_id": "T1",
@@ -271,8 +272,7 @@ Output `queues/{queue-id}.json`:
"execution_order": 1, "execution_order": 1,
"execution_group": "P1", "execution_group": "P1",
"depends_on": [], "depends_on": [],
"semantic_priority": 0.7, "semantic_priority": 0.7
"queued_at": "2025-12-26T10:00:00Z"
} }
], ],
@@ -289,17 +289,16 @@ Output `queues/{queue-id}.json`:
], ],
"execution_groups": [ "execution_groups": [
{ "id": "P1", "type": "parallel", "task_count": 3, "tasks": ["GH-123:T1", "GH-124:T1", "GH-125:T1"] }, { "id": "P1", "type": "parallel", "task_count": 3, "tasks": ["T-1", "T-2", "T-3"] },
{ "id": "S2", "type": "sequential", "task_count": 2, "tasks": ["GH-123:T2", "GH-124:T2"] } { "id": "S2", "type": "sequential", "task_count": 2, "tasks": ["T-4", "T-5"] }
], ],
"_metadata": { "_metadata": {
"version": "2.0", "version": "2.1-optimized",
"total_tasks": 5, "total_tasks": 5,
"pending_count": 3, "pending_count": 3,
"completed_count": 2, "completed_count": 2,
"failed_count": 0, "failed_count": 0,
"created_at": "2025-12-26T10:00:00Z",
"updated_at": "2025-12-26T11:00:00Z", "updated_at": "2025-12-26T11:00:00Z",
"source": "issue-queue-agent" "source": "issue-queue-agent"
} }

View File

@@ -21,7 +21,7 @@ WHILE task exists:
- TEST: Run task.test commands - TEST: Run task.test commands
- VERIFY: Check task.acceptance criteria - VERIFY: Check task.acceptance criteria
- COMMIT: Stage files, commit with task.commit.message_template - COMMIT: Stage files, commit with task.commit.message_template
3. Report completion via ccw issue complete <queue_id> 3. Report completion via ccw issue complete <item_id>
4. Fetch next task via ccw issue next 4. Fetch next task via ccw issue next
WHEN queue empty: WHEN queue empty:
@@ -37,7 +37,7 @@ ccw issue next
``` ```
This returns JSON with the full task definition: This returns JSON with the full task definition:
- `queue_id`: Unique ID for queue tracking (e.g., "Q-001") - `item_id`: Unique task identifier in queue (e.g., "T-1")
- `issue_id`: Parent issue ID (e.g., "ISSUE-20251227-001") - `issue_id`: Parent issue ID (e.g., "ISSUE-20251227-001")
- `task`: Full task definition with implementation steps - `task`: Full task definition with implementation steps
- `context`: Relevant files and patterns - `context`: Relevant files and patterns
@@ -51,7 +51,7 @@ Expected task structure:
```json ```json
{ {
"queue_id": "Q-001", "item_id": "T-1",
"issue_id": "ISSUE-20251227-001", "issue_id": "ISSUE-20251227-001",
"solution_id": "SOL-001", "solution_id": "SOL-001",
"task": { "task": {
@@ -159,7 +159,7 @@ git add path/to/file1.ts path/to/file2.ts ...
git commit -m "$(cat <<'EOF' git commit -m "$(cat <<'EOF'
[task.commit.message_template] [task.commit.message_template]
Queue-ID: [queue_id] Item-ID: [item_id]
Issue-ID: [issue_id] Issue-ID: [issue_id]
Task-ID: [task.id] Task-ID: [task.id]
EOF EOF
@@ -180,7 +180,7 @@ EOF
After commit succeeds, report to queue system: After commit succeeds, report to queue system:
```bash ```bash
ccw issue complete [queue_id] --result '{ ccw issue complete [item_id] --result '{
"files_modified": ["path1", "path2"], "files_modified": ["path1", "path2"],
"tests_passed": true, "tests_passed": true,
"acceptance_passed": true, "acceptance_passed": true,
@@ -193,7 +193,7 @@ ccw issue complete [queue_id] --result '{
**If task failed and cannot be fixed:** **If task failed and cannot be fixed:**
```bash ```bash
ccw issue fail [queue_id] --reason "Phase [X] failed: [details]" ccw issue fail [item_id] --reason "Phase [X] failed: [details]"
``` ```
## Step 5: Continue to Next Task ## Step 5: Continue to Next Task
@@ -206,7 +206,7 @@ ccw issue next
**Output progress:** **Output progress:**
``` ```
✓ [N/M] Completed: [queue_id] - [task.title] ✓ [N/M] Completed: [item_id] - [task.title]
→ Fetching next task... → Fetching next task...
``` ```
@@ -221,10 +221,10 @@ When `ccw issue next` returns `{ "status": "empty" }`:
**Total Tasks Executed**: N **Total Tasks Executed**: N
**All Commits**: **All Commits**:
| # | Queue ID | Task | Commit | | # | Item ID | Task | Commit |
|---|----------|------|--------| |---|---------|------|--------|
| 1 | Q-001 | Task title | abc123 | | 1 | T-1 | Task title | abc123 |
| 2 | Q-002 | Task title | def456 | | 2 | T-2 | Task title | def456 |
**Files Modified**: **Files Modified**:
- path/to/file1.ts - path/to/file1.ts

View File

@@ -277,6 +277,7 @@ export function run(argv: string[]): void {
.option('--priority <n>', 'Task priority (1-5)') .option('--priority <n>', 'Task priority (1-5)')
.option('--format <fmt>', 'Output format: json, markdown') .option('--format <fmt>', 'Output format: json, markdown')
.option('--json', 'Output as JSON') .option('--json', 'Output as JSON')
.option('--ids', 'List only IDs (one per line, for scripting)')
.option('--force', 'Force operation') .option('--force', 'Force operation')
// New options for solution/queue management // New options for solution/queue management
.option('--solution <path>', 'Solution JSON file path') .option('--solution <path>', 'Solution JSON file path')

View File

@@ -5,7 +5,7 @@
*/ */
import chalk from 'chalk'; import chalk from 'chalk';
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs'; import { existsSync, mkdirSync, readFileSync, writeFileSync, unlinkSync } from 'fs';
import { join, resolve } from 'path'; import { join, resolve } from 'path';
// Handle EPIPE errors gracefully // Handle EPIPE errors gracefully
@@ -29,6 +29,18 @@ interface Issue {
source?: string; source?: string;
source_url?: string; source_url?: string;
labels?: string[]; labels?: string[];
// Agent workflow fields
affected_components?: string[];
lifecycle_requirements?: {
test_strategy?: 'unit' | 'integration' | 'e2e' | 'auto';
regression_scope?: 'full' | 'related' | 'affected';
commit_strategy?: 'per-task' | 'atomic' | 'squash';
};
problem_statement?: string;
expected_behavior?: string;
actual_behavior?: string;
reproduction_steps?: string[];
// Timestamps
created_at: string; created_at: string;
updated_at: string; updated_at: string;
planned_at?: string; planned_at?: string;
@@ -100,17 +112,17 @@ interface Solution {
} }
interface QueueItem { interface QueueItem {
queue_id: string; item_id: string; // Task item ID in queue: T-1, T-2, ... (formerly queue_id)
issue_id: string; issue_id: string;
solution_id: string; solution_id: string;
task_id: string; task_id: string;
title?: string;
status: 'pending' | 'ready' | 'executing' | 'completed' | 'failed' | 'blocked'; status: 'pending' | 'ready' | 'executing' | 'completed' | 'failed' | 'blocked';
execution_order: number; execution_order: number;
execution_group: string; execution_group: string;
depends_on: string[]; depends_on: string[];
semantic_priority: number; semantic_priority: number;
assigned_executor: 'codex' | 'gemini' | 'agent'; assigned_executor: 'codex' | 'gemini' | 'agent';
queued_at: string;
started_at?: string; started_at?: string;
completed_at?: string; completed_at?: string;
result?: Record<string, any>; result?: Record<string, any>;
@@ -118,11 +130,11 @@ interface QueueItem {
} }
interface Queue { interface Queue {
id: string; // Queue unique ID: QUE-YYYYMMDD-HHMMSS id: string; // Queue unique ID: QUE-YYYYMMDD-HHMMSS (derived from filename)
name?: string; // Optional queue name name?: string; // Optional queue name
status: 'active' | 'completed' | 'archived' | 'failed'; status: 'active' | 'completed' | 'archived' | 'failed';
issue_ids: string[]; // Issues in this queue issue_ids: string[]; // Issues in this queue
queue: QueueItem[]; tasks: QueueItem[]; // Task items (formerly 'queue')
conflicts: any[]; conflicts: any[];
execution_groups?: any[]; execution_groups?: any[];
_metadata: { _metadata: {
@@ -132,13 +144,12 @@ interface Queue {
executing_count: number; executing_count: number;
completed_count: number; completed_count: number;
failed_count: number; failed_count: number;
created_at: string;
updated_at: string; updated_at: string;
}; };
} }
interface QueueIndex { interface QueueIndex {
active_queue_id: string | null; active_item_id: string | null;
queues: { queues: {
id: string; id: string;
status: string; status: string;
@@ -162,6 +173,7 @@ interface IssueOptions {
json?: boolean; json?: boolean;
force?: boolean; force?: boolean;
fail?: boolean; fail?: boolean;
ids?: boolean; // List only IDs (one per line)
} }
const ISSUES_DIR = '.workflow/issues'; const ISSUES_DIR = '.workflow/issues';
@@ -278,7 +290,7 @@ function ensureQueuesDir(): void {
function readQueueIndex(): QueueIndex { function readQueueIndex(): QueueIndex {
const path = join(getQueuesDir(), 'index.json'); const path = join(getQueuesDir(), 'index.json');
if (!existsSync(path)) { if (!existsSync(path)) {
return { active_queue_id: null, queues: [] }; return { active_item_id: null, queues: [] };
} }
return JSON.parse(readFileSync(path, 'utf-8')); return JSON.parse(readFileSync(path, 'utf-8'));
} }
@@ -319,16 +331,15 @@ function createEmptyQueue(): Queue {
id: generateQueueFileId(), id: generateQueueFileId(),
status: 'active', status: 'active',
issue_ids: [], issue_ids: [],
queue: [], tasks: [],
conflicts: [], conflicts: [],
_metadata: { _metadata: {
version: '2.0', version: '2.1',
total_tasks: 0, total_tasks: 0,
pending_count: 0, pending_count: 0,
executing_count: 0, executing_count: 0,
completed_count: 0, completed_count: 0,
failed_count: 0, failed_count: 0,
created_at: new Date().toISOString(),
updated_at: new Date().toISOString() updated_at: new Date().toISOString()
} }
}; };
@@ -338,11 +349,11 @@ function writeQueue(queue: Queue): void {
ensureQueuesDir(); ensureQueuesDir();
// Update metadata counts // Update metadata counts
queue._metadata.total_tasks = queue.queue.length; queue._metadata.total_tasks = queue.tasks.length;
queue._metadata.pending_count = queue.queue.filter(q => q.status === 'pending').length; queue._metadata.pending_count = queue.tasks.filter(q => q.status === 'pending').length;
queue._metadata.executing_count = queue.queue.filter(q => q.status === 'executing').length; queue._metadata.executing_count = queue.tasks.filter(q => q.status === 'executing').length;
queue._metadata.completed_count = queue.queue.filter(q => q.status === 'completed').length; queue._metadata.completed_count = queue.tasks.filter(q => q.status === 'completed').length;
queue._metadata.failed_count = queue.queue.filter(q => q.status === 'failed').length; queue._metadata.failed_count = queue.tasks.filter(q => q.status === 'failed').length;
queue._metadata.updated_at = new Date().toISOString(); queue._metadata.updated_at = new Date().toISOString();
// Write queue file // Write queue file
@@ -359,7 +370,7 @@ function writeQueue(queue: Queue): void {
issue_ids: queue.issue_ids, issue_ids: queue.issue_ids,
total_tasks: queue._metadata.total_tasks, total_tasks: queue._metadata.total_tasks,
completed_tasks: queue._metadata.completed_count, completed_tasks: queue._metadata.completed_count,
created_at: queue._metadata.created_at, created_at: queue.id.replace('QUE-', '').replace(/(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})/, '$1-$2-$3T$4:$5:$6Z'), // Derive from ID
completed_at: queue.status === 'completed' ? new Date().toISOString() : undefined completed_at: queue.status === 'completed' ? new Date().toISOString() : undefined
}; };
@@ -377,11 +388,11 @@ function writeQueue(queue: Queue): void {
} }
function generateQueueItemId(queue: Queue): string { function generateQueueItemId(queue: Queue): string {
const maxNum = queue.queue.reduce((max, q) => { const maxNum = queue.tasks.reduce((max, q) => {
const match = q.queue_id.match(/^Q-(\d+)$/); const match = q.item_id.match(/^T-(\d+)$/);
return match ? Math.max(max, parseInt(match[1])) : max; return match ? Math.max(max, parseInt(match[1])) : max;
}, 0); }, 0);
return `Q-${String(maxNum + 1).padStart(3, '0')}`; return `T-${maxNum + 1}`;
} }
// ============ Commands ============ // ============ Commands ============
@@ -429,7 +440,19 @@ async function initAction(issueId: string | undefined, options: IssueOptions): P
async function listAction(issueId: string | undefined, options: IssueOptions): Promise<void> { async function listAction(issueId: string | undefined, options: IssueOptions): Promise<void> {
if (!issueId) { if (!issueId) {
// List all issues // List all issues
const issues = readIssues(); let issues = readIssues();
// Filter by status if specified
if (options.status) {
const statuses = options.status.split(',').map(s => s.trim());
issues = issues.filter(i => statuses.includes(i.status));
}
// IDs only mode (one per line, for scripting)
if (options.ids) {
issues.forEach(i => console.log(i.id));
return;
}
if (options.json) { if (options.json) {
console.log(JSON.stringify(issues, null, 2)); console.log(JSON.stringify(issues, null, 2));
@@ -519,7 +542,8 @@ async function statusAction(issueId: string | undefined, options: IssueOptions):
const index = readQueueIndex(); const index = readQueueIndex();
if (options.json) { if (options.json) {
console.log(JSON.stringify({ queue: queue._metadata, issues: issues.length, queues: index.queues.length }, null, 2)); // Return full queue for programmatic access
console.log(JSON.stringify(queue, null, 2));
return; return;
} }
@@ -806,7 +830,7 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
// Archive current queue // Archive current queue
if (subAction === 'archive') { if (subAction === 'archive') {
const queue = readActiveQueue(); const queue = readActiveQueue();
if (!queue.id || queue.queue.length === 0) { if (!queue.id || queue.tasks.length === 0) {
console.log(chalk.yellow('No active queue to archive')); console.log(chalk.yellow('No active queue to archive'));
return; return;
} }
@@ -822,6 +846,31 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
return; return;
} }
// Delete queue from history
if ((subAction === 'clear' || subAction === 'delete') && issueId) {
const queueId = issueId; // issueId is actually queue ID here
const queuePath = join(getQueuesDir(), `${queueId}.json`);
if (!existsSync(queuePath)) {
console.error(chalk.red(`Queue "${queueId}" not found`));
process.exit(1);
}
// Remove from index
const index = readQueueIndex();
index.queues = index.queues.filter(q => q.id !== queueId);
if (index.active_queue_id === queueId) {
index.active_queue_id = null;
}
writeQueueIndex(index);
// Delete queue file
unlinkSync(queuePath);
console.log(chalk.green(`✓ Deleted queue ${queueId}`));
return;
}
// Add issue tasks to queue // Add issue tasks to queue
if (subAction === 'add' && issueId) { if (subAction === 'add' && issueId) {
const issue = findIssue(issueId); const issue = findIssue(issueId);
@@ -839,7 +888,7 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
// Get or create active queue (create new if current is completed/archived) // Get or create active queue (create new if current is completed/archived)
let queue = readActiveQueue(); let queue = readActiveQueue();
const isNewQueue = queue.queue.length === 0 || queue.status !== 'active'; const isNewQueue = queue.tasks.length === 0 || queue.status !== 'active';
if (queue.status !== 'active') { if (queue.status !== 'active') {
// Create new queue if current is not active // Create new queue if current is not active
@@ -853,24 +902,23 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
let added = 0; let added = 0;
for (const task of solution.tasks) { for (const task of solution.tasks) {
const exists = queue.queue.some(q => q.issue_id === issueId && q.task_id === task.id); const exists = queue.tasks.some(q => q.issue_id === issueId && q.task_id === task.id);
if (exists) continue; if (exists) continue;
queue.queue.push({ queue.tasks.push({
queue_id: generateQueueItemId(queue), item_id: generateQueueItemId(queue),
issue_id: issueId, issue_id: issueId,
solution_id: solution.id, solution_id: solution.id,
task_id: task.id, task_id: task.id,
status: 'pending', status: 'pending',
execution_order: queue.queue.length + 1, execution_order: queue.tasks.length + 1,
execution_group: 'P1', execution_group: 'P1',
depends_on: task.depends_on.map(dep => { depends_on: task.depends_on.map(dep => {
const depItem = queue.queue.find(q => q.task_id === dep && q.issue_id === issueId); const depItem = queue.tasks.find(q => q.task_id === dep && q.issue_id === issueId);
return depItem?.queue_id || dep; return depItem?.item_id || dep;
}), }),
semantic_priority: 0.5, semantic_priority: 0.5,
assigned_executor: task.executor === 'auto' ? 'codex' : task.executor as any, assigned_executor: task.executor === 'auto' ? 'codex' : task.executor as any
queued_at: new Date().toISOString()
}); });
added++; added++;
} }
@@ -895,7 +943,7 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
console.log(chalk.bold.cyan('\nActive Queue\n')); console.log(chalk.bold.cyan('\nActive Queue\n'));
if (!queue.id || queue.queue.length === 0) { if (!queue.id || queue.tasks.length === 0) {
console.log(chalk.yellow('No active queue')); console.log(chalk.yellow('No active queue'));
console.log(chalk.gray('Create one: ccw issue queue add <issue-id>')); console.log(chalk.gray('Create one: ccw issue queue add <issue-id>'));
console.log(chalk.gray('Or list history: ccw issue queue list')); console.log(chalk.gray('Or list history: ccw issue queue list'));
@@ -910,7 +958,7 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
console.log(chalk.gray('QueueID'.padEnd(10) + 'Issue'.padEnd(15) + 'Task'.padEnd(8) + 'Status'.padEnd(12) + 'Executor')); console.log(chalk.gray('QueueID'.padEnd(10) + 'Issue'.padEnd(15) + 'Task'.padEnd(8) + 'Status'.padEnd(12) + 'Executor'));
console.log(chalk.gray('-'.repeat(60))); console.log(chalk.gray('-'.repeat(60)));
for (const item of queue.queue) { for (const item of queue.tasks) {
const statusColor = { const statusColor = {
'pending': chalk.gray, 'pending': chalk.gray,
'ready': chalk.cyan, 'ready': chalk.cyan,
@@ -921,7 +969,7 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
}[item.status] || chalk.white; }[item.status] || chalk.white;
console.log( console.log(
item.queue_id.padEnd(10) + item.item_id.padEnd(10) +
item.issue_id.substring(0, 13).padEnd(15) + item.issue_id.substring(0, 13).padEnd(15) +
item.task_id.padEnd(8) + item.task_id.padEnd(8) +
statusColor(item.status.padEnd(12)) + statusColor(item.status.padEnd(12)) +
@@ -937,13 +985,13 @@ async function nextAction(options: IssueOptions): Promise<void> {
const queue = readActiveQueue(); const queue = readActiveQueue();
// Priority 1: Resume executing tasks (interrupted/crashed) // Priority 1: Resume executing tasks (interrupted/crashed)
const executingTasks = queue.queue.filter(item => item.status === 'executing'); const executingTasks = queue.tasks.filter(item => item.status === 'executing');
// Priority 2: Find pending tasks with satisfied dependencies // Priority 2: Find pending tasks with satisfied dependencies
const pendingTasks = queue.queue.filter(item => { const pendingTasks = queue.tasks.filter(item => {
if (item.status !== 'pending') return false; if (item.status !== 'pending') return false;
return item.depends_on.every(depId => { return item.depends_on.every(depId => {
const dep = queue.queue.find(q => q.queue_id === depId); const dep = queue.tasks.find(q => q.item_id === depId);
return !dep || dep.status === 'completed'; return !dep || dep.status === 'completed';
}); });
}); });
@@ -976,25 +1024,25 @@ async function nextAction(options: IssueOptions): Promise<void> {
// Only update status if not already executing (new task) // Only update status if not already executing (new task)
if (!isResume) { if (!isResume) {
const idx = queue.queue.findIndex(q => q.queue_id === nextItem.queue_id); const idx = queue.tasks.findIndex(q => q.item_id === nextItem.item_id);
queue.queue[idx].status = 'executing'; queue.tasks[idx].status = 'executing';
queue.queue[idx].started_at = new Date().toISOString(); queue.tasks[idx].started_at = new Date().toISOString();
writeQueue(queue); writeQueue(queue);
updateIssue(nextItem.issue_id, { status: 'executing' }); updateIssue(nextItem.issue_id, { status: 'executing' });
} }
// Calculate queue stats for context // Calculate queue stats for context
const stats = { const stats = {
total: queue.queue.length, total: queue.tasks.length,
completed: queue.queue.filter(q => q.status === 'completed').length, completed: queue.tasks.filter(q => q.status === 'completed').length,
failed: queue.queue.filter(q => q.status === 'failed').length, failed: queue.tasks.filter(q => q.status === 'failed').length,
executing: executingTasks.length, executing: executingTasks.length,
pending: pendingTasks.length pending: pendingTasks.length
}; };
const remaining = stats.pending + stats.executing; const remaining = stats.pending + stats.executing;
console.log(JSON.stringify({ console.log(JSON.stringify({
queue_id: nextItem.queue_id, item_id: nextItem.item_id,
issue_id: nextItem.issue_id, issue_id: nextItem.issue_id,
solution_id: nextItem.solution_id, solution_id: nextItem.solution_id,
task: taskDef, task: taskDef,
@@ -1025,7 +1073,7 @@ async function doneAction(queueId: string | undefined, options: IssueOptions): P
} }
const queue = readActiveQueue(); const queue = readActiveQueue();
const idx = queue.queue.findIndex(q => q.queue_id === queueId); const idx = queue.tasks.findIndex(q => q.item_id === queueId);
if (idx === -1) { if (idx === -1) {
console.error(chalk.red(`Queue item "${queueId}" not found`)); console.error(chalk.red(`Queue item "${queueId}" not found`));
@@ -1033,22 +1081,22 @@ async function doneAction(queueId: string | undefined, options: IssueOptions): P
} }
const isFail = options.fail; const isFail = options.fail;
queue.queue[idx].status = isFail ? 'failed' : 'completed'; queue.tasks[idx].status = isFail ? 'failed' : 'completed';
queue.queue[idx].completed_at = new Date().toISOString(); queue.tasks[idx].completed_at = new Date().toISOString();
if (isFail) { if (isFail) {
queue.queue[idx].failure_reason = options.reason || 'Unknown failure'; queue.tasks[idx].failure_reason = options.reason || 'Unknown failure';
} else if (options.result) { } else if (options.result) {
try { try {
queue.queue[idx].result = JSON.parse(options.result); queue.tasks[idx].result = JSON.parse(options.result);
} catch { } catch {
console.warn(chalk.yellow('Warning: Could not parse result JSON')); console.warn(chalk.yellow('Warning: Could not parse result JSON'));
} }
} }
// Check if all issue tasks are complete // Check if all issue tasks are complete
const issueId = queue.queue[idx].issue_id; const issueId = queue.tasks[idx].issue_id;
const issueTasks = queue.queue.filter(q => q.issue_id === issueId); const issueTasks = queue.tasks.filter(q => q.issue_id === issueId);
const allIssueComplete = issueTasks.every(q => q.status === 'completed'); const allIssueComplete = issueTasks.every(q => q.status === 'completed');
const anyIssueFailed = issueTasks.some(q => q.status === 'failed'); const anyIssueFailed = issueTasks.some(q => q.status === 'failed');
@@ -1064,13 +1112,13 @@ async function doneAction(queueId: string | undefined, options: IssueOptions): P
} }
// Check if entire queue is complete // Check if entire queue is complete
const allQueueComplete = queue.queue.every(q => q.status === 'completed'); const allQueueComplete = queue.tasks.every(q => q.status === 'completed');
const anyQueueFailed = queue.queue.some(q => q.status === 'failed'); const anyQueueFailed = queue.tasks.some(q => q.status === 'failed');
if (allQueueComplete) { if (allQueueComplete) {
queue.status = 'completed'; queue.status = 'completed';
console.log(chalk.green(`\n✓ Queue ${queue.id} completed (all tasks done)`)); console.log(chalk.green(`\n✓ Queue ${queue.id} completed (all tasks done)`));
} else if (anyQueueFailed && queue.queue.every(q => q.status === 'completed' || q.status === 'failed')) { } else if (anyQueueFailed && queue.tasks.every(q => q.status === 'completed' || q.status === 'failed')) {
queue.status = 'failed'; queue.status = 'failed';
console.log(chalk.yellow(`\n⚠ Queue ${queue.id} has failed tasks`)); console.log(chalk.yellow(`\n⚠ Queue ${queue.id} has failed tasks`));
} }
@@ -1079,24 +1127,20 @@ async function doneAction(queueId: string | undefined, options: IssueOptions): P
} }
/** /**
* retry - Retry failed tasks, or reset stuck executing tasks (--force) * retry - Reset failed tasks to pending for re-execution
*/ */
async function retryAction(issueId: string | undefined, options: IssueOptions): Promise<void> { async function retryAction(issueId: string | undefined, options: IssueOptions): Promise<void> {
const queue = readActiveQueue(); const queue = readActiveQueue();
if (!queue.id || queue.queue.length === 0) { if (!queue.id || queue.tasks.length === 0) {
console.log(chalk.yellow('No active queue')); console.log(chalk.yellow('No active queue'));
return; return;
} }
let updated = 0; let updated = 0;
// Check for stuck executing tasks (started > 30 min ago with no completion) for (const item of queue.tasks) {
const stuckThreshold = 30 * 60 * 1000; // 30 minutes // Retry failed tasks only
const now = Date.now();
for (const item of queue.queue) {
// Retry failed tasks
if (item.status === 'failed') { if (item.status === 'failed') {
if (!issueId || item.issue_id === issueId) { if (!issueId || item.issue_id === issueId) {
item.status = 'pending'; item.status = 'pending';
@@ -1106,23 +1150,11 @@ async function retryAction(issueId: string | undefined, options: IssueOptions):
updated++; updated++;
} }
} }
// Reset stuck executing tasks (optional: use --force or --reset-stuck)
else if (item.status === 'executing' && options.force) {
const startedAt = item.started_at ? new Date(item.started_at).getTime() : 0;
if (now - startedAt > stuckThreshold) {
if (!issueId || item.issue_id === issueId) {
console.log(chalk.yellow(`Resetting stuck task: ${item.queue_id} (started ${Math.round((now - startedAt) / 60000)} min ago)`));
item.status = 'pending';
item.started_at = undefined;
updated++;
}
}
}
} }
if (updated === 0) { if (updated === 0) {
console.log(chalk.yellow('No failed/stuck tasks to retry')); console.log(chalk.yellow('No failed tasks to retry'));
console.log(chalk.gray('Use --force to reset stuck executing tasks (>30 min)')); console.log(chalk.gray('Note: Interrupted (executing) tasks are auto-resumed by "ccw issue next"'));
return; return;
} }
@@ -1203,7 +1235,8 @@ export async function issueCommand(
console.log(chalk.gray(' queue add <issue-id> Add issue to active queue (or create new)')); console.log(chalk.gray(' queue add <issue-id> Add issue to active queue (or create new)'));
console.log(chalk.gray(' queue switch <queue-id> Switch active queue')); console.log(chalk.gray(' queue switch <queue-id> Switch active queue'));
console.log(chalk.gray(' queue archive Archive current queue')); console.log(chalk.gray(' queue archive Archive current queue'));
console.log(chalk.gray(' retry [issue-id] [--force] Retry failed/stuck tasks')); console.log(chalk.gray(' queue delete <queue-id> Delete queue from history'));
console.log(chalk.gray(' retry [issue-id] Retry failed tasks'));
console.log(); console.log();
console.log(chalk.bold('Execution Endpoints:')); console.log(chalk.bold('Execution Endpoints:'));
console.log(chalk.gray(' next Get next ready task (JSON)')); console.log(chalk.gray(' next Get next ready task (JSON)'));
@@ -1212,6 +1245,8 @@ export async function issueCommand(
console.log(); console.log();
console.log(chalk.bold('Options:')); console.log(chalk.bold('Options:'));
console.log(chalk.gray(' --title <title> Issue/task title')); console.log(chalk.gray(' --title <title> Issue/task title'));
console.log(chalk.gray(' --status <status> Filter by status (comma-separated)'));
console.log(chalk.gray(' --ids List only IDs (one per line)'));
console.log(chalk.gray(' --solution <path> Solution JSON file')); console.log(chalk.gray(' --solution <path> Solution JSON file'));
console.log(chalk.gray(' --result <json> Execution result')); console.log(chalk.gray(' --result <json> Execution result'));
console.log(chalk.gray(' --reason <text> Failure reason')); console.log(chalk.gray(' --reason <text> Failure reason'));

View File

@@ -5,7 +5,9 @@
* Storage Structure: * Storage Structure:
* .workflow/issues/ * .workflow/issues/
* ├── issues.jsonl # All issues (one per line) * ├── issues.jsonl # All issues (one per line)
* ├── queue.json # Execution queue * ├── queues/ # Queue history directory
* │ ├── index.json # Queue index (active + history)
* │ └── {queue-id}.json # Individual queue files
* └── solutions/ * └── solutions/
* ├── {issue-id}.jsonl # Solutions for issue (one per line) * ├── {issue-id}.jsonl # Solutions for issue (one per line)
* └── ... * └── ...
@@ -102,12 +104,12 @@ function readQueue(issuesDir: string) {
} }
} }
return { queue: [], conflicts: [], execution_groups: [], _metadata: { version: '1.0', total_tasks: 0 } }; return { tasks: [], conflicts: [], execution_groups: [], _metadata: { version: '1.0', total_tasks: 0 } };
} }
function writeQueue(issuesDir: string, queue: any) { function writeQueue(issuesDir: string, queue: any) {
if (!existsSync(issuesDir)) mkdirSync(issuesDir, { recursive: true }); if (!existsSync(issuesDir)) mkdirSync(issuesDir, { recursive: true });
queue._metadata = { ...queue._metadata, updated_at: new Date().toISOString(), total_tasks: queue.queue?.length || 0 }; queue._metadata = { ...queue._metadata, updated_at: new Date().toISOString(), total_tasks: queue.tasks?.length || 0 };
// Check if using new multi-queue structure // Check if using new multi-queue structure
const queuesDir = join(issuesDir, 'queues'); const queuesDir = join(issuesDir, 'queues');
@@ -123,8 +125,8 @@ function writeQueue(issuesDir: string, queue: any) {
const index = JSON.parse(readFileSync(indexPath, 'utf8')); const index = JSON.parse(readFileSync(indexPath, 'utf8'));
const queueEntry = index.queues?.find((q: any) => q.id === queue.id); const queueEntry = index.queues?.find((q: any) => q.id === queue.id);
if (queueEntry) { if (queueEntry) {
queueEntry.total_tasks = queue.queue?.length || 0; queueEntry.total_tasks = queue.tasks?.length || 0;
queueEntry.completed_tasks = queue.queue?.filter((i: any) => i.status === 'completed').length || 0; queueEntry.completed_tasks = queue.tasks?.filter((i: any) => i.status === 'completed').length || 0;
writeFileSync(indexPath, JSON.stringify(index, null, 2)); writeFileSync(indexPath, JSON.stringify(index, null, 2));
} }
} catch { } catch {
@@ -151,15 +153,29 @@ function getIssueDetail(issuesDir: string, issueId: string) {
} }
function enrichIssues(issues: any[], issuesDir: string) { function enrichIssues(issues: any[], issuesDir: string) {
return issues.map(issue => ({ return issues.map(issue => {
...issue, const solutions = readSolutionsJsonl(issuesDir, issue.id);
solution_count: readSolutionsJsonl(issuesDir, issue.id).length let taskCount = 0;
}));
// Get task count from bound solution
if (issue.bound_solution_id) {
const boundSol = solutions.find(s => s.id === issue.bound_solution_id);
if (boundSol?.tasks) {
taskCount = boundSol.tasks.length;
}
}
return {
...issue,
solution_count: solutions.length,
task_count: taskCount
};
});
} }
function groupQueueByExecutionGroup(queue: any) { function groupQueueByExecutionGroup(queue: any) {
const groups: { [key: string]: any[] } = {}; const groups: { [key: string]: any[] } = {};
for (const item of queue.queue || []) { for (const item of queue.tasks || []) {
const groupId = item.execution_group || 'ungrouped'; const groupId = item.execution_group || 'ungrouped';
if (!groups[groupId]) groups[groupId] = []; if (!groups[groupId]) groups[groupId] = [];
groups[groupId].push(item); groups[groupId].push(item);
@@ -171,7 +187,7 @@ function groupQueueByExecutionGroup(queue: any) {
id, id,
type: id.startsWith('P') ? 'parallel' : id.startsWith('S') ? 'sequential' : 'unknown', type: id.startsWith('P') ? 'parallel' : id.startsWith('S') ? 'sequential' : 'unknown',
task_count: items.length, task_count: items.length,
tasks: items.map(i => i.queue_id) tasks: items.map(i => i.item_id)
})).sort((a, b) => { })).sort((a, b) => {
const aFirst = groups[a.id]?.[0]?.execution_order || 0; const aFirst = groups[a.id]?.[0]?.execution_order || 0;
const bFirst = groups[b.id]?.[0]?.execution_order || 0; const bFirst = groups[b.id]?.[0]?.execution_order || 0;
@@ -229,20 +245,20 @@ export async function handleIssueRoutes(ctx: RouteContext): Promise<boolean> {
} }
const queue = readQueue(issuesDir); const queue = readQueue(issuesDir);
const groupItems = queue.queue.filter((item: any) => item.execution_group === groupId); const groupItems = queue.tasks.filter((item: any) => item.execution_group === groupId);
const otherItems = queue.queue.filter((item: any) => item.execution_group !== groupId); const otherItems = queue.tasks.filter((item: any) => item.execution_group !== groupId);
if (groupItems.length === 0) return { error: `No items in group ${groupId}` }; if (groupItems.length === 0) return { error: `No items in group ${groupId}` };
const groupQueueIds = new Set(groupItems.map((i: any) => i.queue_id)); const groupItemIds = new Set(groupItems.map((i: any) => i.item_id));
if (groupQueueIds.size !== new Set(newOrder).size) { if (groupItemIds.size !== new Set(newOrder).size) {
return { error: 'newOrder must contain all group items' }; return { error: 'newOrder must contain all group items' };
} }
for (const id of newOrder) { for (const id of newOrder) {
if (!groupQueueIds.has(id)) return { error: `Invalid queue_id: ${id}` }; if (!groupItemIds.has(id)) return { error: `Invalid item_id: ${id}` };
} }
const itemMap = new Map(groupItems.map((i: any) => [i.queue_id, i])); const itemMap = new Map(groupItems.map((i: any) => [i.item_id, i]));
const reorderedItems = newOrder.map((qid: string, idx: number) => ({ ...itemMap.get(qid), _idx: idx })); const reorderedItems = newOrder.map((qid: string, idx: number) => ({ ...itemMap.get(qid), _idx: idx }));
const newQueue = [...otherItems, ...reorderedItems].sort((a, b) => { const newQueue = [...otherItems, ...reorderedItems].sort((a, b) => {
const aGroup = parseInt(a.execution_group?.match(/\d+/)?.[0] || '999'); const aGroup = parseInt(a.execution_group?.match(/\d+/)?.[0] || '999');
@@ -255,7 +271,7 @@ export async function handleIssueRoutes(ctx: RouteContext): Promise<boolean> {
}); });
newQueue.forEach((item, idx) => { item.execution_order = idx + 1; delete item._idx; }); newQueue.forEach((item, idx) => { item.execution_order = idx + 1; delete item._idx; });
queue.queue = newQueue; queue.tasks = newQueue;
writeQueue(issuesDir, queue); writeQueue(issuesDir, queue);
return { success: true, groupId, reordered: newOrder.length }; return { success: true, groupId, reordered: newOrder.length };

View File

@@ -6,7 +6,7 @@
// ========== Issue State ========== // ========== Issue State ==========
var issueData = { var issueData = {
issues: [], issues: [],
queue: { queue: [], conflicts: [], execution_groups: [], grouped_items: {} }, queue: { tasks: [], conflicts: [], execution_groups: [], grouped_items: {} },
selectedIssue: null, selectedIssue: null,
selectedSolution: null, selectedSolution: null,
selectedSolutionIssueId: null, selectedSolutionIssueId: null,
@@ -65,7 +65,7 @@ async function loadQueueData() {
issueData.queue = await response.json(); issueData.queue = await response.json();
} catch (err) { } catch (err) {
console.error('Failed to load queue:', err); console.error('Failed to load queue:', err);
issueData.queue = { queue: [], conflicts: [], execution_groups: [], grouped_items: {} }; issueData.queue = { tasks: [], conflicts: [], execution_groups: [], grouped_items: {} };
} }
} }
@@ -360,7 +360,7 @@ function filterIssuesByStatus(status) {
// ========== Queue Section ========== // ========== Queue Section ==========
function renderQueueSection() { function renderQueueSection() {
const queue = issueData.queue; const queue = issueData.queue;
const queueItems = queue.queue || []; const queueItems = queue.tasks || [];
const metadata = queue._metadata || {}; const metadata = queue._metadata || {};
// Check if queue is empty // Check if queue is empty
@@ -530,10 +530,10 @@ function renderQueueItem(item, index, total) {
return ` return `
<div class="queue-item ${statusColors[item.status] || ''}" <div class="queue-item ${statusColors[item.status] || ''}"
draggable="true" draggable="true"
data-queue-id="${item.queue_id}" data-item-id="${item.item_id}"
data-group-id="${item.execution_group}" data-group-id="${item.execution_group}"
onclick="openQueueItemDetail('${item.queue_id}')"> onclick="openQueueItemDetail('${item.item_id}')">
<span class="queue-item-id font-mono text-xs">${item.queue_id}</span> <span class="queue-item-id font-mono text-xs">${item.item_id}</span>
<span class="queue-item-issue text-xs text-muted-foreground">${item.issue_id}</span> <span class="queue-item-issue text-xs text-muted-foreground">${item.issue_id}</span>
<span class="queue-item-task text-sm">${item.task_id}</span> <span class="queue-item-task text-sm">${item.task_id}</span>
<span class="queue-item-priority" style="opacity: ${item.semantic_priority || 0.5}"> <span class="queue-item-priority" style="opacity: ${item.semantic_priority || 0.5}">
@@ -586,12 +586,12 @@ function handleIssueDragStart(e) {
const item = e.target.closest('.queue-item'); const item = e.target.closest('.queue-item');
if (!item) return; if (!item) return;
issueDragState.dragging = item.dataset.queueId; issueDragState.dragging = item.dataset.itemId;
issueDragState.groupId = item.dataset.groupId; issueDragState.groupId = item.dataset.groupId;
item.classList.add('dragging'); item.classList.add('dragging');
e.dataTransfer.effectAllowed = 'move'; e.dataTransfer.effectAllowed = 'move';
e.dataTransfer.setData('text/plain', item.dataset.queueId); e.dataTransfer.setData('text/plain', item.dataset.itemId);
} }
function handleIssueDragEnd(e) { function handleIssueDragEnd(e) {
@@ -610,7 +610,7 @@ function handleIssueDragOver(e) {
e.preventDefault(); e.preventDefault();
const target = e.target.closest('.queue-item'); const target = e.target.closest('.queue-item');
if (!target || target.dataset.queueId === issueDragState.dragging) return; if (!target || target.dataset.itemId === issueDragState.dragging) return;
// Only allow drag within same group // Only allow drag within same group
if (target.dataset.groupId !== issueDragState.groupId) { if (target.dataset.groupId !== issueDragState.groupId) {
@@ -635,7 +635,7 @@ function handleIssueDrop(e) {
// Get new order // Get new order
const items = Array.from(container.querySelectorAll('.queue-item')); const items = Array.from(container.querySelectorAll('.queue-item'));
const draggedItem = items.find(i => i.dataset.queueId === issueDragState.dragging); const draggedItem = items.find(i => i.dataset.itemId === issueDragState.dragging);
const targetIndex = items.indexOf(target); const targetIndex = items.indexOf(target);
const draggedIndex = items.indexOf(draggedItem); const draggedIndex = items.indexOf(draggedItem);
@@ -649,7 +649,7 @@ function handleIssueDrop(e) {
} }
// Get new order and save // Get new order and save
const newOrder = Array.from(container.querySelectorAll('.queue-item')).map(i => i.dataset.queueId); const newOrder = Array.from(container.querySelectorAll('.queue-item')).map(i => i.dataset.itemId);
saveQueueOrder(issueDragState.groupId, newOrder); saveQueueOrder(issueDragState.groupId, newOrder);
} }
@@ -767,7 +767,7 @@ function renderIssueDetailPanel(issue) {
<div class="flex items-center justify-between"> <div class="flex items-center justify-between">
<span class="font-mono text-sm">${task.id}</span> <span class="font-mono text-sm">${task.id}</span>
<select class="task-status-select" onchange="updateTaskStatus('${issue.id}', '${task.id}', this.value)"> <select class="task-status-select" onchange="updateTaskStatus('${issue.id}', '${task.id}', this.value)">
${['pending', 'ready', 'in_progress', 'completed', 'failed', 'paused', 'skipped'].map(s => ${['pending', 'ready', 'executing', 'completed', 'failed', 'blocked', 'paused', 'skipped'].map(s =>
`<option value="${s}" ${task.status === s ? 'selected' : ''}>${s}</option>` `<option value="${s}" ${task.status === s ? 'selected' : ''}>${s}</option>`
).join('')} ).join('')}
</select> </select>
@@ -1145,8 +1145,8 @@ function escapeHtml(text) {
return div.innerHTML; return div.innerHTML;
} }
function openQueueItemDetail(queueId) { function openQueueItemDetail(itemId) {
const item = issueData.queue.queue?.find(q => q.queue_id === queueId); const item = issueData.queue.tasks?.find(q => q.item_id === itemId);
if (item) { if (item) {
openIssueDetail(item.issue_id); openIssueDetail(item.issue_id);
} }

4
package-lock.json generated
View File

@@ -1,12 +1,12 @@
{ {
"name": "claude-code-workflow", "name": "claude-code-workflow",
"version": "6.2.9", "version": "6.3.8",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "claude-code-workflow", "name": "claude-code-workflow",
"version": "6.2.9", "version": "6.3.8",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@modelcontextprotocol/sdk": "^1.0.4", "@modelcontextprotocol/sdk": "^1.0.4",

View File

@@ -1,6 +1,6 @@
{ {
"name": "claude-code-workflow", "name": "claude-code-workflow",
"version": "6.3.6", "version": "6.3.8",
"description": "JSON-driven multi-agent development framework with intelligent CLI orchestration (Gemini/Qwen/Codex), context-first architecture, and automated workflow execution", "description": "JSON-driven multi-agent development framework with intelligent CLI orchestration (Gemini/Qwen/Codex), context-first architecture, and automated workflow execution",
"type": "module", "type": "module",
"main": "ccw/src/index.js", "main": "ccw/src/index.js",