Mirror of https://github.com/catlog22/Claude-Code-Workflow.git (synced 2026-02-14 02:42:04 +08:00)
feat(queue): support solution-based queues and update metadata handling
@@ -172,20 +172,47 @@ function decomposeTasks(issue, exploration) {
 - Task validation (all 5 phases present)
 - File isolation check (ensure minimal overlap across issues in batch)
 
-**Solution Registration** (via CLI endpoint):
+**Solution Registration** (via file write):
 
-**Step 1: Create solutions**
-```bash
-ccw issue solution <issue-id> --data '{"description":"...", "approach":"...", "tasks":[...]}'
-# Output: {"id":"SOL-{issue-id}-1", ...}
+**Step 1: Create solution files**
+
+Write solution JSON to JSONL file (one line per solution):
+
+```
+.workflow/issues/solutions/{issue-id}.jsonl
 ```
 
-**CLI Features:**
-| Feature | Description |
-|---------|-------------|
-| Auto-increment ID | `SOL-{issue-id}-{seq}` (e.g., `SOL-GH-123-1`) |
-| Multi-solution | Appends to existing JSONL, supports multiple per issue |
-| Trailing newline | Proper JSONL format, no corruption |
+**File Format** (JSONL - each line is a complete solution):
+```
+{"id":"SOL-GH-123-1","description":"...","approach":"...","analysis":{...},"score":0.85,"tasks":[...]}
+{"id":"SOL-GH-123-2","description":"...","approach":"...","analysis":{...},"score":0.75,"tasks":[...]}
+```
+
+**Solution Schema** (must match CLI `Solution` interface):
+```typescript
+{
+  id: string;              // Format: SOL-{issue-id}-{N}
+  description?: string;
+  approach?: string;
+  tasks: SolutionTask[];
+  analysis?: { risk, impact, complexity };
+  score?: number;
+  // Note: is_bound, created_at are added by CLI on read
+}
+```
+
+**Write Operation**:
+```javascript
+// Append solution to JSONL file (one line per solution)
+const solutionId = `SOL-${issueId}-${seq}`;
+const solutionLine = JSON.stringify({ id: solutionId, ...solution });
+
+// Read existing, append new line, write back
+const filePath = `.workflow/issues/solutions/${issueId}.jsonl`;
+const existing = existsSync(filePath) ? readFileSync(filePath) : '';
+const newContent = existing.trimEnd() + (existing ? '\n' : '') + solutionLine + '\n';
+Write({ file_path: filePath, content: newContent })
+```
 
 **Step 2: Bind decision**
 - **Single solution** → Auto-bind: `ccw issue bind <issue-id> <solution-id>`
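The Write Operation above assumes the agent's Write tool. As a rough standalone equivalent in plain Node/TypeScript (a sketch; the `appendSolution` helper and the direct `fs` calls are assumptions, not part of the workflow doc):

```typescript
// Sketch: append one solution line to the per-issue JSONL file.
import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
import { dirname } from 'path';

function appendSolution(issueId: string, seq: number, solution: object): string {
  const solutionId = `SOL-${issueId}-${seq}`;
  const solutionLine = JSON.stringify({ id: solutionId, ...solution });
  const filePath = `.workflow/issues/solutions/${issueId}.jsonl`;

  mkdirSync(dirname(filePath), { recursive: true });
  // Read as text ('utf8') so trimEnd() operates on a string rather than a Buffer.
  const existing = existsSync(filePath) ? readFileSync(filePath, 'utf8') : '';
  const newContent = existing.trimEnd() + (existing ? '\n' : '') + solutionLine + '\n';
  writeFileSync(filePath, newContent, 'utf8');
  return solutionId;
}
```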
@@ -251,9 +278,9 @@ Each line is a solution JSON containing tasks. Schema: `cat .claude/workflows/cl
 4. Quantify acceptance.criteria with testable conditions
 5. Validate DAG before output
 6. Evaluate each solution with `analysis` and `score`
-7. Use CLI endpoint: `ccw issue solution <issue-id> --data '{...}'`
+7. Write solutions to `.workflow/issues/solutions/{issue-id}.jsonl` (append mode)
 8. For HIGH complexity: generate 2-3 candidate solutions
-9. **Solution ID format**: `SOL-{issue-id}-{seq}` (e.g., `SOL-GH-123-1`, `SOL-GH-123-2`)
+9. **Solution ID format**: `SOL-{issue-id}-{N}` (e.g., `SOL-GH-123-1`, `SOL-GH-123-2`)
 
 **CONFLICT AVOIDANCE** (for batch processing of similar issues):
 1. **File isolation**: Each issue's solution should target distinct files when possible
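Because solutions are appended by the agent rather than numbered by a CLI, the next `{N}` has to be derived from the file itself. A minimal sketch of one way to do that (the `getNextSolutionSeq` helper is hypothetical):

```typescript
import { existsSync, readFileSync } from 'fs';

// Sketch: derive the next N for SOL-{issue-id}-{N} from existing JSONL lines.
function getNextSolutionSeq(issueId: string): number {
  const filePath = `.workflow/issues/solutions/${issueId}.jsonl`;
  if (!existsSync(filePath)) return 1;

  let maxSeq = 0;
  for (const line of readFileSync(filePath, 'utf8').split('\n')) {
    if (!line.trim()) continue;
    const id: string = JSON.parse(line).id || '';
    const match = id.match(/-(\d+)$/); // trailing -{N}
    if (match) maxSeq = Math.max(maxSeq, parseInt(match[1], 10));
  }
  return maxSeq + 1;
}
```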
@@ -270,6 +297,6 @@ Each line is a solution JSON containing tasks. Schema: `cat .claude/workflows/cl
 5. **Bind when multiple solutions exist** - MUST check `solutions.length === 1` before calling `ccw issue bind`
 
 **OUTPUT**:
-1. Create solutions via CLI: `ccw issue solution <issue-id> --data '{...}'`
+1. Write solutions to `.workflow/issues/solutions/{issue-id}.jsonl` (JSONL format)
 2. Single solution → `ccw issue bind <issue-id> <solution-id>`; Multiple → return only
 3. Return JSON with `bound`, `pending_selection`
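The bind rule (auto-bind only when exactly one solution exists) can be captured as a small decision step. A sketch under the assumption that the agent shells out to the CLI (`decideBinding` is hypothetical; the JSONL path, the `ccw issue bind` command, and the `bound`/`pending_selection` fields come from the text above):

```typescript
import { execSync } from 'child_process';
import { readFileSync } from 'fs';

// Sketch: bind automatically only for a single solution; otherwise report candidates.
function decideBinding(issueId: string): { bound: string | null; pending_selection: string[] } {
  const filePath = `.workflow/issues/solutions/${issueId}.jsonl`;
  const solutions = readFileSync(filePath, 'utf8')
    .split('\n')
    .filter((l) => l.trim() !== '')
    .map((l) => JSON.parse(l));

  if (solutions.length === 1) {
    execSync(`ccw issue bind ${issueId} ${solutions[0].id}`);
    return { bound: solutions[0].id, pending_selection: [] };
  }
  return { bound: null, pending_selection: solutions.map((s: any) => s.id) };
}
```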
@@ -120,7 +120,18 @@ function readQueue(issuesDir: string) {
 
 function writeQueue(issuesDir: string, queue: any) {
   if (!existsSync(issuesDir)) mkdirSync(issuesDir, { recursive: true });
-  queue._metadata = { ...queue._metadata, updated_at: new Date().toISOString(), total_tasks: queue.tasks?.length || 0 };
+
+  // Support both solution-based and task-based queues
+  const items = queue.solutions || queue.tasks || [];
+  const isSolutionBased = Array.isArray(queue.solutions) && queue.solutions.length > 0;
+
+  queue._metadata = {
+    ...queue._metadata,
+    updated_at: new Date().toISOString(),
+    ...(isSolutionBased
+      ? { total_solutions: items.length }
+      : { total_tasks: items.length })
+  };
 
   // Check if using new multi-queue structure
   const queuesDir = join(issuesDir, 'queues');
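After this change, the `_metadata` that writeQueue stamps on the queue depends on the queue type. Roughly (illustrative values, not real output):

```typescript
// Solution-based queue (queue.solutions is a non-empty array)
const solutionMetadata = {
  updated_at: '2026-02-14T00:00:00.000Z',
  total_solutions: 3,
};

// Task-based queue (falls back to queue.tasks)
const taskMetadata = {
  updated_at: '2026-02-14T00:00:00.000Z',
  total_tasks: 12,
};
```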
@@ -136,8 +147,13 @@ function writeQueue(issuesDir: string, queue: any) {
       const index = JSON.parse(readFileSync(indexPath, 'utf8'));
       const queueEntry = index.queues?.find((q: any) => q.id === queue.id);
       if (queueEntry) {
-        queueEntry.total_tasks = queue.tasks?.length || 0;
-        queueEntry.completed_tasks = queue.tasks?.filter((i: any) => i.status === 'completed').length || 0;
+        if (isSolutionBased) {
+          queueEntry.total_solutions = items.length;
+          queueEntry.completed_solutions = items.filter((i: any) => i.status === 'completed').length;
+        } else {
+          queueEntry.total_tasks = items.length;
+          queueEntry.completed_tasks = items.filter((i: any) => i.status === 'completed').length;
+        }
         writeFileSync(indexPath, JSON.stringify(index, null, 2));
       }
     } catch {
@@ -184,9 +200,26 @@ function enrichIssues(issues: any[], issuesDir: string) {
   });
 }
 
+/**
+ * Get queue items (supports both solution-based and task-based queues)
+ */
+function getQueueItems(queue: any): any[] {
+  return queue.solutions || queue.tasks || [];
+}
+
+/**
+ * Check if queue is solution-based
+ */
+function isSolutionBasedQueue(queue: any): boolean {
+  return Array.isArray(queue.solutions) && queue.solutions.length > 0;
+}
+
 function groupQueueByExecutionGroup(queue: any) {
   const groups: { [key: string]: any[] } = {};
-  for (const item of queue.tasks || []) {
+  const items = getQueueItems(queue);
+  const isSolutionBased = isSolutionBasedQueue(queue);
+
+  for (const item of items) {
     const groupId = item.execution_group || 'ungrouped';
     if (!groups[groupId]) groups[groupId] = [];
     groups[groupId].push(item);
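A quick usage sketch for the two helpers, assuming they are in scope (queue shapes are illustrative; `item_id` and `execution_group` follow the fields used elsewhere in this file):

```typescript
const solutionQueue = { solutions: [{ item_id: 'SOL-GH-123-1', execution_group: 'P1' }] };
const taskQueue = { tasks: [{ item_id: 'TASK-1', execution_group: 'S1' }] };

isSolutionBasedQueue(solutionQueue); // true
getQueueItems(solutionQueue);        // [{ item_id: 'SOL-GH-123-1', execution_group: 'P1' }]

isSolutionBasedQueue(taskQueue);     // false
getQueueItems(taskQueue);            // [{ item_id: 'TASK-1', execution_group: 'S1' }]
```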
@@ -194,11 +227,13 @@ function groupQueueByExecutionGroup(queue: any) {
   for (const groupId of Object.keys(groups)) {
     groups[groupId].sort((a, b) => (a.execution_order || 0) - (b.execution_order || 0));
   }
-  const executionGroups = Object.entries(groups).map(([id, items]) => ({
+  const executionGroups = Object.entries(groups).map(([id, groupItems]) => ({
     id,
     type: id.startsWith('P') ? 'parallel' : id.startsWith('S') ? 'sequential' : 'unknown',
-    task_count: items.length,
-    tasks: items.map(i => i.item_id)
+    // Use appropriate count field based on queue type
+    ...(isSolutionBased
+      ? { solution_count: groupItems.length, solutions: groupItems.map(i => i.item_id) }
+      : { task_count: groupItems.length, tasks: groupItems.map(i => i.item_id) })
   })).sort((a, b) => {
     const aFirst = groups[a.id]?.[0]?.execution_order || 0;
     const bFirst = groups[b.id]?.[0]?.execution_order || 0;
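For a solution-based queue, each mapped group now carries `solution_count`/`solutions` instead of `task_count`/`tasks`. An illustrative element of `executionGroups` (made-up values):

```typescript
const exampleGroup = {
  id: 'P1',
  type: 'parallel',
  solution_count: 2,
  solutions: ['SOL-GH-123-1', 'SOL-GH-124-1'],
};
// A task-based queue would instead yield:
// { id: 'P1', type: 'parallel', task_count: 2, tasks: ['T-001', 'T-002'] }
```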
@@ -323,7 +358,7 @@ export async function handleIssueRoutes(ctx: RouteContext): Promise<boolean> {
     return true;
   }
 
-  // POST /api/queue/reorder - Reorder queue items
+  // POST /api/queue/reorder - Reorder queue items (supports both solutions and tasks)
   if (pathname === '/api/queue/reorder' && req.method === 'POST') {
     handlePostRequest(req, res, async (body: any) => {
       const { groupId, newOrder } = body;
@@ -332,8 +367,11 @@ export async function handleIssueRoutes(ctx: RouteContext): Promise<boolean> {
       }
 
       const queue = readQueue(issuesDir);
-      const groupItems = queue.tasks.filter((item: any) => item.execution_group === groupId);
-      const otherItems = queue.tasks.filter((item: any) => item.execution_group !== groupId);
+      const items = getQueueItems(queue);
+      const isSolutionBased = isSolutionBasedQueue(queue);
+
+      const groupItems = items.filter((item: any) => item.execution_group === groupId);
+      const otherItems = items.filter((item: any) => item.execution_group !== groupId);
 
       if (groupItems.length === 0) return { error: `No items in group ${groupId}` };
 
@@ -347,7 +385,7 @@ export async function handleIssueRoutes(ctx: RouteContext): Promise<boolean> {
 
       const itemMap = new Map(groupItems.map((i: any) => [i.item_id, i]));
       const reorderedItems = newOrder.map((qid: string, idx: number) => ({ ...itemMap.get(qid), _idx: idx }));
-      const newQueue = [...otherItems, ...reorderedItems].sort((a, b) => {
+      const newQueueItems = [...otherItems, ...reorderedItems].sort((a, b) => {
         const aGroup = parseInt(a.execution_group?.match(/\d+/)?.[0] || '999');
         const bGroup = parseInt(b.execution_group?.match(/\d+/)?.[0] || '999');
         if (aGroup !== bGroup) return aGroup - bGroup;
@@ -357,8 +395,14 @@ export async function handleIssueRoutes(ctx: RouteContext): Promise<boolean> {
         return (a.execution_order || 0) - (b.execution_order || 0);
       });
 
-      newQueue.forEach((item, idx) => { item.execution_order = idx + 1; delete item._idx; });
-      queue.tasks = newQueue;
+      newQueueItems.forEach((item, idx) => { item.execution_order = idx + 1; delete item._idx; });
+
+      // Write back to appropriate array based on queue type
+      if (isSolutionBased) {
+        queue.solutions = newQueueItems;
+      } else {
+        queue.tasks = newQueueItems;
+      }
       writeQueue(issuesDir, queue);
 
       return { success: true, groupId, reordered: newOrder.length };
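End to end, the reorder endpoint behaves the same for both queue types; only the array written back differs. A sketch of a client call (the origin and port are assumptions; the path, body fields, and response fields come from the handler above):

```typescript
// Sketch: reorder items inside execution group P1.
async function reorderGroup(): Promise<void> {
  const res = await fetch('http://localhost:3000/api/queue/reorder', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      groupId: 'P1',
      newOrder: ['SOL-GH-124-1', 'SOL-GH-123-1'], // item_id values in the desired order
    }),
  });
  // Expected success shape: { success: true, groupId: 'P1', reordered: 2 }
  console.log(await res.json());
}

reorderGroup().catch(console.error);
```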