feat(queue): add queue merge support, skipping duplicate items and marking the source queue as merged

catlog22 committed 2026-01-19 15:35:41 +08:00
parent e58c33fb6e
commit eeaefa7208
5 changed files with 592 additions and 59 deletions

View File

@@ -65,9 +65,13 @@ Queue formation command using **issue-queue-agent** that analyzes all bound solu
--queues <n> Number of parallel queues (default: 1)
--issue <id> Form queue for specific issue only
--append <id> Append issue to active queue (don't create new)
--force Skip active queue check, always create new queue
# CLI subcommands (ccw issue queue ...)
ccw issue queue list List all queues with status
ccw issue queue add <issue-id> Add issue to queue (interactive if active queue exists)
ccw issue queue add <issue-id> -f Add to new queue without prompt (force)
ccw issue queue merge <src> --queue <target> Merge source queue into target queue
ccw issue queue switch <queue-id> Switch active queue
ccw issue queue archive Archive current queue
ccw issue queue delete <queue-id> Delete queue from history
@@ -92,7 +96,7 @@ Phase 2-4: Agent-Driven Queue Formation (issue-queue-agent)
│ ├─ Build dependency DAG from conflicts
│ ├─ Calculate semantic priority per solution
│ └─ Assign execution groups (parallel/sequential)
└─ Each agent writes: queue JSON + index update
└─ Each agent writes: queue JSON + index update (NOT active yet)
Phase 5: Conflict Clarification (if needed)
├─ Collect `clarifications` arrays from all agents
@@ -102,7 +106,24 @@ Phase 5: Conflict Clarification (if needed)
Phase 6: Status Update & Summary
├─ Update issue statuses to 'queued'
└─ Display queue summary (N queues), next step: /issue:execute
└─ Display new queue summary (N queues)
Phase 7: Active Queue Check & Decision (REQUIRED)
├─ Read queue index: ccw issue queue list --brief
├─ Get generated queue ID from agent output
├─ If NO active queue exists:
│ ├─ Set generated queue as active_queue_id
│ ├─ Update index.json
│ └─ Display: "Queue created and activated"
└─ If active queue exists with items:
├─ Display both queues to user
├─ Use AskUserQuestion to prompt:
│ ├─ "Use new queue (keep existing)" → Set new as active, keep old inactive
│ ├─ "Merge: add new items to existing" → Merge new → existing, delete new
│ ├─ "Merge: add existing items to new" → Merge existing → new, archive old
│ └─ "Cancel" → Delete new queue, keep existing active
└─ Execute chosen action
```
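Phase 7 reads and updates `index.json`. The sketch below shows the index shape inferred from the fields this workflow reads and writes; the field set is an assumption and additional fields may exist.

```typescript
// Sketch of index.json, inferred from the fields used in this workflow.
interface QueueIndexEntry {
  id: string;                      // e.g. 'QUE-TARGET' (ID format illustrative)
  status: string;                  // 'active' | 'merged' | 'archived' | ...
  total_solutions?: number;
  completed_solutions?: number;
  issue_ids?: string[];
}

interface QueueIndex {
  active_queue_id: string | null;  // null → Phase 7 activates the newly formed queue
  queues: QueueIndexEntry[];
}

// Phase 7's first branch, expressed in these terms (illustrative helper name):
function shouldAutoActivate(index: QueueIndex): boolean {
  return index.active_queue_id === null;
}
```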
## Implementation
@@ -306,6 +327,41 @@ ccw issue update <issue-id> --status queued
- Show unplanned issues (planned but NOT in queue)
- Show next step: `/issue:execute`
### Phase 7: Active Queue Check & Decision
**After the agent completes Phases 1-6, check for an active queue:**
```bash
ccw issue queue list --brief
```
**Decision:**
- If `active_queue_id` is null → `ccw issue queue switch <new-queue-id>` (activate new queue)
- If active queue exists → Use **AskUserQuestion** to prompt user
**AskUserQuestion:**
```javascript
AskUserQuestion({
questions: [{
question: "Active queue exists. How would you like to proceed?",
header: "Queue Action",
options: [
{ label: "Merge into existing queue", description: "Add new items to active queue, delete new queue" },
{ label: "Use new queue", description: "Switch to new queue, keep existing in history" },
{ label: "Cancel", description: "Delete new queue, keep existing active" }
],
multiSelect: false
}]
})
```
**Action Commands:**
| User Choice | Commands |
|-------------|----------|
| **Merge into existing** | `ccw issue queue merge <new-queue-id> --queue <active-queue-id>` then `ccw issue queue delete <new-queue-id>` |
| **Use new queue** | `ccw issue queue switch <new-queue-id>` |
| **Cancel** | `ccw issue queue delete <new-queue-id>` |
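
For scripted (non-interactive) use, the same merge-then-delete sequence can be driven through the CLI's `--json` output. A minimal sketch, assuming `ccw` is on `PATH`; the `mergeViaCli` helper and the queue IDs are illustrative, while the flags and output fields follow the merge subcommand's JSON response.

```typescript
import { execFileSync } from 'node:child_process';

// Shape of `ccw issue queue merge ... --json` output (mirrors MergeResult).
interface MergeOutput {
  success: boolean;
  sourceQueueId: string;
  targetQueueId: string;
  itemsMerged: number;
  skippedDuplicates: number;
  totalItems: number;
  reason?: string;
}

// Hypothetical helper: run the merge via the CLI and parse its JSON output.
function mergeViaCli(sourceId: string, targetId: string): MergeOutput {
  const stdout = execFileSync(
    'ccw',
    ['issue', 'queue', 'merge', sourceId, '--queue', targetId, '--json'],
    { encoding: 'utf8' }
  );
  return JSON.parse(stdout) as MergeOutput;
}

// Example: merge a newly formed queue into the active one, then delete it.
const result = mergeViaCli('QUE-NEW-001', 'QUE-ACTIVE-001');
if (result.success) {
  execFileSync('ccw', ['issue', 'queue', 'delete', 'QUE-NEW-001']);
} else {
  console.warn(`Merge skipped: ${result.reason}`);
}
```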
## Storage Structure (Queue History)
@@ -360,6 +416,9 @@ ccw issue update <issue-id> --status queued
| User cancels clarification | Abort queue formation |
| **index.json not updated** | Auto-fix: Set active_queue_id to new queue |
| **Queue file missing solutions** | Abort with error, agent must regenerate |
| **User cancels queue add** | Display message, return without changes |
| **Merge with empty source** | Skip merge, display warning |
| **All items duplicate** | Skip merge, display "All items already exist" |
## Quality Checklist

View File

@@ -650,6 +650,125 @@ function createEmptyQueue(): Queue {
};
}
interface MergeResult {
success: boolean;
itemsMerged: number;
totalItems: number;
skippedDuplicates: number;
reason?: string;
}
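// Illustrative result: merging a 2-item source (1 duplicate, 1 new) into a
// 1-item target yields { success: true, itemsMerged: 1, totalItems: 2, skippedDuplicates: 1 }.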
/**
* Merge items from source queue into target queue
* - Skips duplicate items (same issue_id + solution_id)
* - Re-generates item IDs for merged items
* - Marks source queue as 'merged' with metadata (or deletes if deleteSource=true)
* - Updates queue index
*/
function mergeQueues(target: Queue, source: Queue, options?: { deleteSource?: boolean }): MergeResult {
const sourceItems = source.solutions || source.tasks || [];
const targetItems = target.solutions || target.tasks || [];
if (sourceItems.length === 0) {
return { success: false, itemsMerged: 0, totalItems: targetItems.length, skippedDuplicates: 0, reason: 'Source queue is empty' };
}
// Ensure target has solutions array
if (!target.solutions) {
target.solutions = [];
}
let itemsMerged = 0;
let skippedDuplicates = 0;
for (const sourceItem of sourceItems) {
// Skip if already exists in target (same issue_id + solution_id)
const exists = target.solutions.some(
t => t.issue_id === sourceItem.issue_id && t.solution_id === sourceItem.solution_id
);
if (exists) {
skippedDuplicates++;
continue;
}
// Add issue to target's issue_ids if not present
if (!target.issue_ids.includes(sourceItem.issue_id)) {
target.issue_ids.push(sourceItem.issue_id);
}
// Clone and add item with new item_id
const newItem: QueueItem = {
...sourceItem,
item_id: generateQueueItemId(target, 'solution'),
execution_order: target.solutions.length + 1
};
target.solutions.push(newItem);
itemsMerged++;
}
// Merge conflicts if any
if (source.conflicts && source.conflicts.length > 0) {
if (!target.conflicts) target.conflicts = [];
target.conflicts.push(...source.conflicts);
}
// Write updated target queue
writeQueue(target);
// Handle source queue: delete or mark as merged
const index = readQueueIndex();
if (options?.deleteSource) {
// Delete source queue file and remove from index
const queuePath = join(getQueuesDir(), `${source.id}.json`);
if (existsSync(queuePath)) {
unlinkSync(queuePath);
}
index.queues = index.queues.filter(q => q.id !== source.id);
} else {
// Mark source queue as merged
source.status = 'merged' as any;
if (!source._metadata) {
source._metadata = {
version: '2.1',
total_tasks: 0,
pending_count: 0,
executing_count: 0,
completed_count: 0,
failed_count: 0,
updated_at: new Date().toISOString()
};
}
(source._metadata as any).merged_into = target.id;
(source._metadata as any).merged_at = new Date().toISOString();
writeQueue(source);
const sourceEntry = index.queues.find(q => q.id === source.id);
if (sourceEntry) {
sourceEntry.status = 'merged';
}
}
// Update target entry in index
const targetEntry = index.queues.find(q => q.id === target.id);
if (targetEntry) {
targetEntry.total_solutions = target.solutions.length;
targetEntry.completed_solutions = target.solutions.filter(s => s.status === 'completed').length;
targetEntry.issue_ids = target.issue_ids;
}
writeQueueIndex(index);
return {
success: itemsMerged > 0,
itemsMerged,
totalItems: target.solutions.length,
skippedDuplicates,
reason: itemsMerged === 0 ? 'All items already exist in target queue' : undefined
};
}
// ============ Multi-Queue Helper Functions ============
/**
@@ -1826,6 +1945,58 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
return;
}
// Merge queues: ccw issue queue merge <source-id> --queue <target-id>
if (subAction === 'merge' && issueId) {
const sourceQueueId = issueId; // issueId is actually source queue ID here
const targetQueueId = options.queue; // --queue option
if (!targetQueueId) {
console.error(chalk.red('Target queue ID required'));
console.error(chalk.gray('Usage: ccw issue queue merge <source-id> --queue <target-id>'));
process.exit(1);
}
const sourceQueue = readQueue(sourceQueueId);
const targetQueue = readQueue(targetQueueId);
if (!sourceQueue) {
console.error(chalk.red(`Source queue "${sourceQueueId}" not found`));
process.exit(1);
}
if (!targetQueue) {
console.error(chalk.red(`Target queue "${targetQueueId}" not found`));
process.exit(1);
}
// mergeQueues marks source as 'merged' and updates index
const result = mergeQueues(targetQueue, sourceQueue);
if (options.json) {
console.log(JSON.stringify({
success: result.success,
sourceQueueId,
targetQueueId,
itemsMerged: result.itemsMerged,
skippedDuplicates: result.skippedDuplicates,
totalItems: result.totalItems,
reason: result.reason
}, null, 2));
} else {
if (result.success) {
console.log(chalk.green(`✓ Merged ${result.itemsMerged} items from ${sourceQueueId} into ${targetQueueId}`));
if (result.skippedDuplicates > 0) {
console.log(chalk.gray(` Skipped ${result.skippedDuplicates} duplicate items`));
}
console.log(chalk.gray(` Total items in target: ${result.totalItems}`));
console.log(chalk.gray(` Source queue ${sourceQueueId} marked as 'merged'`));
} else {
console.log(chalk.yellow(`⚠ Merge skipped: ${result.reason}`));
}
}
return;
}
// Archive current queue
if (subAction === 'archive') {
const queue = readActiveQueue();
@@ -1900,32 +2071,12 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
process.exit(1);
}
// Get or create active queue (create new if current is completed/archived)
let queue = readActiveQueue();
const items = queue.solutions || [];
const isNewQueue = items.length === 0 || queue.status !== 'active';
if (queue.status !== 'active') {
// Create new queue if current is not active
queue = createEmptyQueue();
}
// Ensure solutions array exists
if (!queue.solutions) {
queue.solutions = [];
}
// Check if solution already in queue
const exists = queue.solutions.some(q => q.issue_id === issueId && q.solution_id === solution.id);
if (exists) {
console.log(chalk.yellow(`Solution ${solution.id} already in queue`));
return;
}
// Step 1: Create new queue (temporary, not active yet)
const newQueue = createEmptyQueue();
newQueue.solutions = [];
// Add issue to queue's issue list
if (!queue.issue_ids.includes(issueId)) {
queue.issue_ids.push(issueId);
}
newQueue.issue_ids.push(issueId);
// Collect all files touched by this solution
const filesTouched = new Set<string>();
@@ -1936,12 +2087,12 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
}
// Create solution-level queue item (S-N)
queue.solutions.push({
item_id: generateQueueItemId(queue, 'solution'),
newQueue.solutions.push({
item_id: generateQueueItemId(newQueue, 'solution'),
issue_id: issueId,
solution_id: solution.id,
status: 'pending',
execution_order: queue.solutions.length + 1,
execution_order: 1,
execution_group: 'P1',
depends_on: [],
semantic_priority: 0.5,
@@ -1949,13 +2100,76 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
files_touched: Array.from(filesTouched)
});
writeQueue(queue);
// Step 2: Write temporary queue file
writeQueue(newQueue);
updateIssue(issueId, { status: 'queued', queued_at: new Date().toISOString() });
if (isNewQueue) {
console.log(chalk.green(`✓ Created queue ${queue.id}`));
console.log(chalk.green(`✓ Created temporary queue ${newQueue.id}`));
console.log(chalk.gray(` Solution ${solution.id} (${solution.tasks?.length || 0} tasks)`));
// Step 3: Check for existing active queue
const existingQueue = readQueue();
const hasActiveQueue = existingQueue && existingQueue.status === 'active' &&
(existingQueue.solutions?.length || existingQueue.tasks?.length || 0) > 0;
if (!hasActiveQueue || options.force) {
// No active queue or force flag - set new queue as active
const index = readQueueIndex();
index.active_queue_id = newQueue.id;
writeQueueIndex(index);
console.log(chalk.green(`✓ Queue ${newQueue.id} activated`));
return;
}
console.log(chalk.green(`✓ Added solution ${solution.id} (${solution.tasks?.length || 0} tasks) to queue`));
// Step 4: Active queue exists - prompt user
const existingItems = existingQueue!.solutions || existingQueue!.tasks || [];
console.log();
console.log(chalk.cyan(`Active queue exists: ${existingQueue!.id}`));
console.log(chalk.gray(` Issues: ${existingQueue!.issue_ids.join(', ')}`));
console.log(chalk.gray(` Items: ${existingItems.length} (${existingItems.filter(i => i.status === 'completed').length} completed)`));
console.log();
const { action } = await inquirer.prompt([{
type: 'list',
name: 'action',
message: 'How would you like to proceed?',
choices: [
{ name: 'Merge into existing queue', value: 'merge_to_existing' },
{ name: 'Use new queue', value: 'use_new' },
{ name: 'Cancel', value: 'cancel' }
]
}]);
// Step 5: Execute user choice
if (action === 'cancel') {
// Delete temporary queue
const queuePath = join(getQueuesDir(), `${newQueue.id}.json`);
unlinkSync(queuePath);
console.log(chalk.yellow(`✓ New queue deleted, keeping ${existingQueue!.id} active`));
return;
}
if (action === 'use_new') {
// Switch to new queue
const index = readQueueIndex();
index.active_queue_id = newQueue.id;
writeQueueIndex(index);
console.log(chalk.green(`✓ Switched to new queue ${newQueue.id}`));
console.log(chalk.gray(` Previous queue ${existingQueue!.id} remains in history`));
return;
}
if (action === 'merge_to_existing') {
// Merge new → existing, delete temporary queue
const mergeResult = mergeQueues(existingQueue!, newQueue, { deleteSource: true });
console.log(chalk.green(`✓ Merged ${mergeResult.itemsMerged} items into ${existingQueue!.id}`));
if (mergeResult.skippedDuplicates > 0) {
console.log(chalk.gray(` Skipped ${mergeResult.skippedDuplicates} duplicate items`));
}
console.log(chalk.gray(` Temporary queue ${newQueue.id} deleted`));
return;
}
return;
}

View File

@@ -637,22 +637,44 @@ export async function handleIssueRoutes(ctx: RouteContext): Promise<boolean> {
const targetItems = targetQueue.solutions || targetQueue.tasks || [];
const isSolutionBased = !!targetQueue.solutions;
// Re-index source items to avoid ID conflicts
const maxOrder = targetItems.reduce((max: number, i: any) => Math.max(max, i.execution_order || 0), 0);
const reindexedSourceItems = sourceItems.map((item: any, idx: number) => ({
...item,
item_id: `${item.item_id}-merged`,
execution_order: maxOrder + idx + 1,
execution_group: item.execution_group ? `M-${item.execution_group}` : 'M-ungrouped'
}));
if (!isSolutionBased) {
targetQueue.solutions = [];
}
// Merge items
const mergedItems = [...targetItems, ...reindexedSourceItems];
// Helper to generate next item ID (S-N format)
const getNextItemId = (): string => {
const items = targetQueue.solutions || [];
const maxNum = items.reduce((max: number, i: any) => {
const match = i.item_id?.match(/^S-(\d+)$/);
return match ? Math.max(max, parseInt(match[1])) : max;
}, 0);
return `S-${maxNum + 1}`;
};
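// Example (illustrative): with existing item_ids ['S-1', 'S-3'] the next ID is 'S-4';
// item_ids that do not match the S-N pattern are ignored.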
if (isSolutionBased) {
targetQueue.solutions = mergedItems;
} else {
targetQueue.tasks = mergedItems;
let itemsMerged = 0;
let skippedDuplicates = 0;
for (const sourceItem of sourceItems) {
// Skip duplicates (same issue_id + solution_id)
const exists = (targetQueue.solutions || []).some(
(t: any) => t.issue_id === sourceItem.issue_id && t.solution_id === sourceItem.solution_id
);
if (exists) {
skippedDuplicates++;
continue;
}
// Add with new item_id (S-N format)
const newItem = {
...sourceItem,
item_id: getNextItemId(),
execution_order: (targetQueue.solutions?.length || 0) + 1
};
if (!targetQueue.solutions) targetQueue.solutions = [];
targetQueue.solutions.push(newItem);
itemsMerged++;
}
// Merge issue_ids
@@ -662,20 +684,26 @@ export async function handleIssueRoutes(ctx: RouteContext): Promise<boolean> {
])];
targetQueue.issue_ids = mergedIssueIds;
// Merge conflicts
if (sourceQueue.conflicts && sourceQueue.conflicts.length > 0) {
if (!targetQueue.conflicts) targetQueue.conflicts = [];
targetQueue.conflicts.push(...sourceQueue.conflicts);
}
// Update metadata
const mergedItems = targetQueue.solutions || [];
const completedCount = mergedItems.filter((i: any) => i.status === 'completed').length;
targetQueue._metadata = {
...targetQueue._metadata,
updated_at: new Date().toISOString(),
...(isSolutionBased
? { total_solutions: mergedItems.length, completed_solutions: completedCount }
: { total_tasks: mergedItems.length, completed_tasks: completedCount })
total_solutions: mergedItems.length,
completed_solutions: completedCount
};
// Write merged queue
writeFileSync(targetPath, JSON.stringify(targetQueue, null, 2));
// Update source queue status
// Update source queue status to 'merged'
sourceQueue.status = 'merged';
sourceQueue._metadata = {
...sourceQueue._metadata,
@@ -695,13 +723,8 @@ export async function handleIssueRoutes(ctx: RouteContext): Promise<boolean> {
sourceEntry.status = 'merged';
}
if (targetEntry) {
if (isSolutionBased) {
targetEntry.total_solutions = mergedItems.length;
targetEntry.completed_solutions = completedCount;
} else {
targetEntry.total_tasks = mergedItems.length;
targetEntry.completed_tasks = completedCount;
}
targetEntry.total_solutions = mergedItems.length;
targetEntry.completed_solutions = completedCount;
targetEntry.issue_ids = mergedIssueIds;
}
writeFileSync(indexPath, JSON.stringify(index, null, 2));
@@ -714,7 +737,8 @@ export async function handleIssueRoutes(ctx: RouteContext): Promise<boolean> {
success: true,
sourceQueueId,
targetQueueId,
mergedItemCount: sourceItems.length,
mergedItemCount: itemsMerged,
skippedDuplicates,
totalItems: mergedItems.length
};
} catch (err) {

View File

@@ -292,5 +292,65 @@ describe('issue routes integration', async () => {
assert.equal(Array.isArray(res.json.execution_groups), true);
assert.equal(typeof res.json.grouped_items, 'object');
});
it('POST /api/queue/merge merges source queue into target and skips duplicates', async () => {
const { writeFileSync, mkdirSync } = await import('fs');
const { join } = await import('path');
// Create queues directory
const queuesDir = join(projectRoot, '.workflow', 'issues', 'queues');
mkdirSync(queuesDir, { recursive: true });
// Create target queue
const targetQueue = {
id: 'QUE-TARGET',
status: 'active',
issue_ids: ['ISS-1'],
solutions: [
{ item_id: 'S-1', issue_id: 'ISS-1', solution_id: 'SOL-1', status: 'pending' }
],
conflicts: []
};
writeFileSync(join(queuesDir, 'QUE-TARGET.json'), JSON.stringify(targetQueue));
// Create source queue with one duplicate and one new item
const sourceQueue = {
id: 'QUE-SOURCE',
status: 'active',
issue_ids: ['ISS-1', 'ISS-2'],
solutions: [
{ item_id: 'S-1', issue_id: 'ISS-1', solution_id: 'SOL-1', status: 'pending' }, // Duplicate
{ item_id: 'S-2', issue_id: 'ISS-2', solution_id: 'SOL-2', status: 'pending' } // New
],
conflicts: []
};
writeFileSync(join(queuesDir, 'QUE-SOURCE.json'), JSON.stringify(sourceQueue));
// Create index
writeFileSync(join(queuesDir, 'index.json'), JSON.stringify({
active_queue_id: 'QUE-TARGET',
queues: [
{ id: 'QUE-TARGET', status: 'active' },
{ id: 'QUE-SOURCE', status: 'active' }
]
}));
// Merge
const res = await requestJson(baseUrl, 'POST', '/api/queue/merge', {
sourceQueueId: 'QUE-SOURCE',
targetQueueId: 'QUE-TARGET'
});
assert.equal(res.status, 200);
assert.equal(res.json.success, true);
assert.equal(res.json.mergedItemCount, 1); // Only new item merged
assert.equal(res.json.skippedDuplicates, 1); // Duplicate skipped
assert.equal(res.json.totalItems, 2); // Target now has 2 items
// Verify source queue is marked as merged
const sourceContent = JSON.parse(readFileSync(join(queuesDir, 'QUE-SOURCE.json'), 'utf8'));
assert.equal(sourceContent.status, 'merged');
assert.equal(sourceContent._metadata.merged_into, 'QUE-TARGET');
});
});

View File

@@ -965,6 +965,182 @@ describe('issue command module', async () => {
assert.equal(existsSync(join(env.queuesDir, `${queueId}.json`)), false);
});
it('queue merge merges source queue into target and marks source as merged', async () => {
issueModule ??= await import(issueCommandUrl);
assert.ok(env);
const logs: string[] = [];
mock.method(console, 'log', (...args: any[]) => {
logs.push(args.map(String).join(' '));
});
mock.method(console, 'error', () => {});
// Create target queue
const targetId = 'QUE-TARGET-001';
issueModule.writeQueue({
id: targetId,
status: 'active',
issue_ids: ['ISS-1'],
tasks: [],
solutions: [
{
item_id: 'S-1',
issue_id: 'ISS-1',
solution_id: 'SOL-ISS-1-1',
status: 'pending',
execution_order: 1,
files_touched: ['src/a.ts'],
task_count: 1,
},
],
conflicts: [],
});
// Create source queue
const sourceId = 'QUE-SOURCE-001';
issueModule.writeQueue({
id: sourceId,
status: 'active',
issue_ids: ['ISS-2'],
tasks: [],
solutions: [
{
item_id: 'S-1',
issue_id: 'ISS-2',
solution_id: 'SOL-ISS-2-1',
status: 'pending',
execution_order: 1,
files_touched: ['src/b.ts'],
task_count: 2,
},
],
conflicts: [{ id: 'CFT-1', type: 'file', severity: 'low' }],
});
// Set target as active queue
const indexPath = join(env.queuesDir, 'index.json');
writeFileSync(indexPath, JSON.stringify({ active_queue_id: targetId, queues: [] }));
await issueModule.issueCommand('queue', ['merge', sourceId], { queue: targetId });
// Verify merge result
const mergedTarget = issueModule.readQueue(targetId);
assert.ok(mergedTarget);
assert.equal(mergedTarget.solutions.length, 2);
assert.equal(mergedTarget.solutions[0].item_id, 'S-1');
assert.equal(mergedTarget.solutions[1].item_id, 'S-2'); // Re-generated ID
assert.equal(mergedTarget.solutions[1].issue_id, 'ISS-2');
assert.deepEqual(mergedTarget.issue_ids, ['ISS-1', 'ISS-2']);
assert.equal(mergedTarget.conflicts.length, 1); // Merged conflicts
// Verify source queue is marked as merged
const sourceQueue = issueModule.readQueue(sourceId);
assert.ok(sourceQueue);
assert.equal(sourceQueue.status, 'merged');
assert.equal(sourceQueue._metadata?.merged_into, targetId);
});
it('queue merge skips duplicate solutions with same issue_id and solution_id', async () => {
issueModule ??= await import(issueCommandUrl);
assert.ok(env);
mock.method(console, 'log', () => {});
mock.method(console, 'error', () => {});
const targetId = 'QUE-TARGET-DUP';
const sourceId = 'QUE-SOURCE-DUP';
// Create target with a solution
issueModule.writeQueue({
id: targetId,
status: 'active',
issue_ids: ['ISS-DUP'],
tasks: [],
solutions: [
{
item_id: 'S-1',
issue_id: 'ISS-DUP',
solution_id: 'SOL-ISS-DUP-1',
status: 'pending',
execution_order: 1,
files_touched: ['src/dup.ts'],
task_count: 1,
},
],
conflicts: [],
});
// Create source with same solution (duplicate)
issueModule.writeQueue({
id: sourceId,
status: 'active',
issue_ids: ['ISS-DUP'],
tasks: [],
solutions: [
{
item_id: 'S-1',
issue_id: 'ISS-DUP',
solution_id: 'SOL-ISS-DUP-1', // Same issue_id + solution_id
status: 'pending',
execution_order: 1,
files_touched: ['src/dup.ts'],
task_count: 1,
},
],
conflicts: [],
});
const indexPath = join(env.queuesDir, 'index.json');
writeFileSync(indexPath, JSON.stringify({ active_queue_id: targetId, queues: [] }));
await issueModule.issueCommand('queue', ['merge', sourceId], { queue: targetId });
const mergedTarget = issueModule.readQueue(targetId);
assert.ok(mergedTarget);
// Should still have only 1 solution (duplicate skipped)
assert.equal(mergedTarget.solutions.length, 1);
assert.equal(mergedTarget.solutions[0].solution_id, 'SOL-ISS-DUP-1');
});
it('queue merge returns skipped reason when source is empty', async () => {
issueModule ??= await import(issueCommandUrl);
assert.ok(env);
const logs: string[] = [];
mock.method(console, 'log', (...args: any[]) => {
logs.push(args.map(String).join(' '));
});
mock.method(console, 'error', () => {});
const targetId = 'QUE-TARGET-EMPTY';
const sourceId = 'QUE-SOURCE-EMPTY';
issueModule.writeQueue({
id: targetId,
status: 'active',
issue_ids: [],
tasks: [],
solutions: [{ item_id: 'S-1', issue_id: 'ISS-1', solution_id: 'SOL-1', status: 'pending' }],
conflicts: [],
});
issueModule.writeQueue({
id: sourceId,
status: 'active',
issue_ids: [],
tasks: [],
solutions: [], // Empty source
conflicts: [],
});
const indexPath = join(env.queuesDir, 'index.json');
writeFileSync(indexPath, JSON.stringify({ active_queue_id: targetId, queues: [] }));
await issueModule.issueCommand('queue', ['merge', sourceId], { queue: targetId });
assert.ok(logs.some((l) => l.includes('skipped') || l.includes('empty')));
});
});
describe('Queue Execution', () => {