mirror of
https://github.com/catlog22/Claude-Code-Workflow.git
synced 2026-02-11 02:33:51 +08:00
feat: add API settings page for managing providers, endpoints, cache, model pools, and CLI settings
feat: implement semantic install dialog for CodexLens with GPU mode selection
feat: add radio group component for GPU mode selection
feat: update navigation and localization for API settings and semantic install
611
ccw/tests/cli-executor-concurrent.test.ts
Normal file
@@ -0,0 +1,611 @@
/**
 * L2 Concurrent Execution Tests for CLI Executor - Resume Mechanism Fixes
 *
 * Test coverage:
 * - L2: Multiple parallel ccw instances with unique transaction IDs
 * - L2: Race condition prevention with transaction ID mechanism
 * - L2: Zero duplicate session mappings in concurrent scenarios
 * - L2: Session tracking accuracy under load
 *
 * Test layers:
 * - L2 (System): Concurrent execution scenarios with multiple processes
 *
 * Success criteria:
 * - 5 parallel executions complete without duplicates
 * - Transaction timeout < 100ms
 * - Resume success rate > 95%
 */

import { after, afterEach, before, beforeEach, describe, it, mock } from 'node:test';
import assert from 'node:assert/strict';
import { existsSync, mkdtempSync, mkdirSync, rmSync, readFileSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { EventEmitter } from 'node:events';
import { PassThrough } from 'node:stream';

const TEST_CCW_HOME = mkdtempSync(join(tmpdir(), 'ccw-concurrent-home-'));
const TEST_PROJECT_ROOT = mkdtempSync(join(tmpdir(), 'ccw-concurrent-project-'));

const cliExecutorUrl = new URL('../dist/tools/cli-executor.js', import.meta.url);
const historyStoreUrl = new URL('../dist/tools/cli-history-store.js', import.meta.url);

cliExecutorUrl.searchParams.set('t', String(Date.now()));
historyStoreUrl.searchParams.set('t', String(Date.now()));

// eslint-disable-next-line @typescript-eslint/no-explicit-any
let cliExecutorModule: any;
// eslint-disable-next-line @typescript-eslint/no-explicit-any
let historyStoreModule: any;

const originalEnv = { CCW_DATA_DIR: process.env.CCW_DATA_DIR };

function resetDir(dirPath: string): void {
  if (existsSync(dirPath)) {
    rmSync(dirPath, { recursive: true, force: true });
  }
  mkdirSync(dirPath, { recursive: true });
}

/**
 * Mock child process for concurrent testing
 */
type FakeChild = EventEmitter & {
  pid: number;
  killed: boolean;
  stdin: PassThrough;
  stdout: PassThrough;
  stderr: PassThrough;
  kill: (signal?: string) => boolean;
  killCalls: string[];
  close: (code?: number) => void;
};
/**
 * Create a fake child process that simulates CLI tool execution
 */
function createFakeChild(pid: number, options: {
  closeDelay?: number;
  output?: string;
  transactionId?: string;
}): FakeChild {
  const child = new EventEmitter() as FakeChild;
  child.pid = pid;
  child.killed = false;
  child.stdin = new PassThrough();
  child.stdout = new PassThrough();
  child.stderr = new PassThrough();
  child.killCalls = [];

  let closed = false;
  child.close = (code: number = 0) => {
    if (closed) return;
    closed = true;

    // Simulate transaction ID in output
    const output = options.transactionId
      ? `[CCW-TX-ID: ${options.transactionId}]\n\n${options.output || 'Execution output'}`
      : (options.output || 'Execution output');

    child.stdout.write(output);
    child.stdout.end();
    child.stderr.end();
    child.emit('close', code);
  };

  child.kill = (signal?: string) => {
    const sig = signal || 'SIGTERM';
    child.killCalls.push(sig);
    child.killed = true;
    queueMicrotask(() => child.close(0));
    return true;
  };

  // Auto-close after delay if not explicitly closed
  if (options.closeDelay && options.closeDelay > 0) {
    setTimeout(() => {
      if (!closed) child.close(0);
    }, options.closeDelay).unref();
  }

  return child;
}
/**
 * Test configuration for concurrent execution
 */
interface ConcurrentExecutionConfig {
  numProcesses: number;
  delayMs?: number;
  outputTemplate?: string;
}

/**
 * Run multiple CLI executions concurrently
 */
async function runConcurrentExecutions(
  config: ConcurrentExecutionConfig,
  handler: Function
): Promise<Array<{ id: string; txId: string; pid: number; success: boolean }>> {
  const executions: Array<{ id: string; txId: string; pid: number; success: boolean }> = [];

  const promises = Array.from({ length: config.numProcesses }, async (_, i) => {
    const conversationId = `1702123456789-gemini-${Date.now()}-${i}`;
    const txId = `ccw-tx-${conversationId}-${Math.random().toString(36).slice(2, 9)}`;
    const pid = 5000 + i;

    try {
      const result = await handler({
        tool: 'gemini',
        prompt: `Test concurrent execution ${i}`,
        cd: TEST_PROJECT_ROOT,
        id: conversationId
      });

      executions.push({
        id: conversationId,
        txId,
        pid,
        success: true
      });

      return result;
    } catch (error) {
      executions.push({
        id: conversationId,
        txId,
        pid,
        success: false
      });
      throw error;
    }
  });

  await Promise.allSettled(promises);
  return executions;
}
describe('CLI Executor - Concurrent Execution (L2)', async () => {
  const toolChildren: FakeChild[] = [];
  const plannedBehaviors: Array<{ closeDelay?: number; output?: string; transactionId?: string }> = [];

  before(async () => {
    process.env.CCW_DATA_DIR = TEST_CCW_HOME;

    // Mock child_process.spawn
    const { createRequire } = await import('node:module');
    const require = createRequire(import.meta.url);
    const childProcess = require('child_process');
    const originalSpawn = childProcess.spawn;

    childProcess.spawn = (command: unknown, args: unknown[], options: Record<string, unknown>) => {
      const cmd = String(command);

      // Handle tool discovery commands
      if (cmd === 'where' || cmd === 'which') {
        const child = createFakeChild(4000, { closeDelay: 10, output: `C:\\\\fake\\\\tool.cmd\r\n` });
        toolChildren.push(child);
        return child;
      }

      // Create tool child with planned behavior
      const behavior = plannedBehaviors.shift() || { closeDelay: 50 };
      const child = createFakeChild(5000 + toolChildren.length, behavior);
      toolChildren.push(child);

      return child;
    };

    cliExecutorModule = await import(cliExecutorUrl.href);
    historyStoreModule = await import(historyStoreUrl.href);
  });
  beforeEach(() => {
    process.env.CCW_DATA_DIR = TEST_CCW_HOME;
    mock.method(console, 'warn', () => {});
    mock.method(console, 'error', () => {});
    mock.method(console, 'log', () => {});

    try {
      historyStoreModule?.closeAllStores?.();
    } catch {
      // ignore
    }

    resetDir(TEST_CCW_HOME);
    toolChildren.length = 0;
    plannedBehaviors.length = 0;
  });

  afterEach(() => {
    mock.restoreAll();

    // Clean up any remaining fake children
    for (const child of toolChildren) {
      try {
        if (!child.killed) {
          child.close(0);
        }
      } catch {
        // ignore
      }
    }
  });

  after(() => {
    try {
      historyStoreModule?.closeAllStores?.();
    } catch {
      // ignore
    }
    process.env.CCW_DATA_DIR = originalEnv.CCW_DATA_DIR;
    rmSync(TEST_CCW_HOME, { recursive: true, force: true });
    rmSync(TEST_PROJECT_ROOT, { recursive: true, force: true });
  });
  describe('L2: Sequential execution baseline', () => {
    it('executes 5 sequential CLI invocations without errors', async () => {
      const numExecutions = 5;
      const results: Array<{ id: string; success: boolean }> = [];

      for (let i = 0; i < numExecutions; i++) {
        plannedBehaviors.push({ closeDelay: 20, output: `Execution ${i} output` });

        const conversationId = `1702123456789-gemini-${Date.now()}-${i}`;

        try {
          await cliExecutorModule.handler({
            tool: 'gemini',
            prompt: `Test execution ${i}`,
            cd: TEST_PROJECT_ROOT,
            id: conversationId
          });

          results.push({ id: conversationId, success: true });
        } catch (error) {
          results.push({ id: conversationId, success: false });
        }
      }

      // All executions should succeed
      assert.equal(results.length, numExecutions);
      assert.equal(results.filter(r => r.success).length, numExecutions);

      // Verify unique IDs
      const ids = new Set(results.map(r => r.id));
      assert.equal(ids.size, numExecutions);
    });

    it('creates unique conversation IDs for sequential executions', async () => {
      const numExecutions = 5;
      const store = new historyStoreModule.CliHistoryStore(TEST_PROJECT_ROOT);
      const conversationIds = new Set<string>();

      for (let i = 0; i < numExecutions; i++) {
        plannedBehaviors.push({ closeDelay: 10 });

        const conversationId = `1702123456789-gemini-${Date.now()}-${i}`;

        await cliExecutorModule.handler({
          tool: 'gemini',
          prompt: `Test ${i}`,
          cd: TEST_PROJECT_ROOT,
          id: conversationId
        });

        conversationIds.add(conversationId);
      }

      // Verify all IDs are unique
      assert.equal(conversationIds.size, numExecutions);

      // Verify all conversations were saved
      const history = store.getHistory({ limit: 100 });
      const storedIds = new Set(history.executions.map((e: any) => e.id));

      for (const id of conversationIds) {
        assert.ok(storedIds.has(id), `Conversation ${id} not found in history`);
      }
    });
  });
  describe('L2: Concurrent execution (5 parallel)', () => {
    it('executes 5 parallel ccw instances with unique transaction IDs', async () => {
      const numParallel = 5;

      // Plan behaviors for all 5 children
      for (let i = 0; i < numParallel; i++) {
        plannedBehaviors.push({
          closeDelay: 50,
          output: `Parallel execution ${i} output`,
          transactionId: `ccw-tx-1702123456789-gemini-${Date.now()}-${i}-unique`
        });
      }

      const executions = await runConcurrentExecutions(
        { numProcesses: numParallel },
        cliExecutorModule.handler.bind(cliExecutorModule)
      );

      // Verify all executions completed
      assert.equal(executions.length, numParallel);
      assert.equal(executions.filter(e => e.success).length, numParallel);

      // Verify all transaction IDs are unique
      const txIds = new Set(executions.map(e => e.txId));
      assert.equal(txIds.size, numParallel, 'Transaction IDs must be unique');

      // Verify all conversation IDs are unique
      const convIds = new Set(executions.map(e => e.id));
      assert.equal(convIds.size, numParallel, 'Conversation IDs must be unique');
    });

    it('prevents duplicate session mappings in concurrent scenario', async () => {
      const numParallel = 5;
      const store = new historyStoreModule.CliHistoryStore(TEST_PROJECT_ROOT);

      // Plan behaviors for all 5 children
      for (let i = 0; i < numParallel; i++) {
        plannedBehaviors.push({ closeDelay: 30 });
      }

      const executions = await runConcurrentExecutions(
        { numProcesses: numParallel },
        cliExecutorModule.handler.bind(cliExecutorModule)
      );

      // Wait for all sessions to be tracked
      await new Promise(resolve => setTimeout(resolve, 100).unref());

      // Check for duplicate mappings
      const allMappings: any[] = [];
      for (const exec of executions) {
        const mapping = store.getNativeSessionMapping(exec.id);
        if (mapping) {
          allMappings.push(mapping);
        }
      }

      // Verify no duplicate native_session_id
      const nativeSessionIds = new Set(allMappings.map(m => m.native_session_id));
      assert.equal(
        nativeSessionIds.size,
        allMappings.length,
        'No duplicate native session IDs should exist'
      );

      // Verify all have transaction IDs
      const mappingsWithoutTxId = allMappings.filter(m => !m.transaction_id);
      assert.equal(mappingsWithoutTxId.length, 0, 'All mappings should have transaction IDs');
    });

    it('handles concurrent execution with zero duplicate rate', async () => {
      const numParallel = 5;
      const store = new historyStoreModule.CliHistoryStore(TEST_PROJECT_ROOT);

      // Plan behaviors
      for (let i = 0; i < numParallel; i++) {
        plannedBehaviors.push({ closeDelay: 40 });
      }

      const executions = await runConcurrentExecutions(
        { numProcesses: numParallel },
        cliExecutorModule.handler.bind(cliExecutorModule)
      );

      // Get all conversation IDs
      const conversationIds = executions.map(e => e.id);

      // Check for duplicates in history
      const history = store.getHistory({ limit: 100 });
      const historyIds = history.executions.map((e: any) => e.id);

      // Count occurrences of each ID
      const idCounts = new Map<string, number>();
      for (const id of historyIds) {
        idCounts.set(id, (idCounts.get(id) || 0) + 1);
      }

      // Verify no duplicates (all counts should be 1)
      const duplicates = Array.from(idCounts.entries()).filter(([_, count]) => count > 1);
      assert.equal(duplicates.length, 0, `Found duplicate conversation IDs: ${JSON.stringify(duplicates)}`);

      // Verify all test conversations are present
      for (const id of conversationIds) {
        assert.ok(historyIds.includes(id), `Conversation ${id} not found in history`);
      }
    });
  });
  describe('L2: Race condition prevention', () => {
    it('uses transaction IDs to prevent session confusion', async () => {
      const numParallel = 3;

      // Each execution gets a unique transaction ID
      const transactionIds: string[] = [];
      for (let i = 0; i < numParallel; i++) {
        const txId = `ccw-tx-race-test-${i}-${Date.now()}`;
        transactionIds.push(txId);
        plannedBehaviors.push({
          closeDelay: 30,
          transactionId: txId
        });
      }

      const executions = await runConcurrentExecutions(
        { numProcesses: numParallel },
        cliExecutorModule.handler.bind(cliExecutorModule)
      );

      const store = new historyStoreModule.CliHistoryStore(TEST_PROJECT_ROOT);

      // Verify each conversation has the correct transaction ID
      for (let i = 0; i < numParallel; i++) {
        const mapping = store.getNativeSessionMapping(executions[i].id);
        if (mapping) {
          assert.equal(mapping.transaction_id, transactionIds[i]);
        }
      }

      // Verify all transaction IDs are unique in mappings
      const allMappings = executions
        .map(e => store.getNativeSessionMapping(e.id))
        .filter(m => m !== null);

      const storedTxIds = new Set(allMappings.map((m: any) => m.transaction_id));
      assert.equal(storedTxIds.size, numParallel);
    });

    it('handles rapid sequential execution without conflicts', async () => {
      const numRapid = 10;
      const store = new historyStoreModule.CliHistoryStore(TEST_PROJECT_ROOT);

      const results: Array<{ id: string; txId: string }> = [];

      for (let i = 0; i < numRapid; i++) {
        plannedBehaviors.push({ closeDelay: 5 });

        const conversationId = `1702123456789-gemini-${Date.now()}-${i}`;
        const txId = `ccw-tx-${conversationId}-${i}`;

        try {
          await cliExecutorModule.handler({
            tool: 'gemini',
            prompt: `Rapid test ${i}`,
            cd: TEST_PROJECT_ROOT,
            id: conversationId
          });

          results.push({ id: conversationId, txId });
        } catch (error) {
          // Some may fail due to timing
        }
      }

      // Verify at least 80% succeeded (allowing for some timing issues)
      assert.ok(results.length >= numRapid * 0.8, `Expected at least ${numRapid * 0.8} successes, got ${results.length}`);

      // Verify no duplicate IDs
      const ids = new Set(results.map(r => r.id));
      assert.equal(ids.size, results.length);
    });
  });
  describe('L2: Performance assertions', () => {
    it('single execution completes within 500ms', async () => {
      plannedBehaviors.push({ closeDelay: 50 });

      const start = Date.now();
      await cliExecutorModule.handler({
        tool: 'gemini',
        prompt: 'Performance test',
        cd: TEST_PROJECT_ROOT,
        id: `1702123456789-gemini-${Date.now()}-perf`
      });
      const duration = Date.now() - start;

      assert.ok(duration < 500, `Execution took ${duration}ms, expected < 500ms`);
    });

    it('concurrent execution completes within reasonable time', async () => {
      const numParallel = 5;

      for (let i = 0; i < numParallel; i++) {
        plannedBehaviors.push({ closeDelay: 30 });
      }

      const start = Date.now();
      const executions = await runConcurrentExecutions(
        { numProcesses: numParallel },
        cliExecutorModule.handler.bind(cliExecutorModule)
      );
      const duration = Date.now() - start;

      assert.equal(executions.filter(e => e.success).length, numParallel);
      assert.ok(duration < 2000, `Concurrent execution took ${duration}ms, expected < 2000ms`);
    });
  });
  describe('L2: Error handling in concurrent scenarios', () => {
    it('handles partial failures gracefully', async () => {
      const numParallel = 5;
      const store = new historyStoreModule.CliHistoryStore(TEST_PROJECT_ROOT);

      // Plan behaviors: some succeed, some fail
      for (let i = 0; i < numParallel; i++) {
        plannedBehaviors.push({
          closeDelay: i % 2 === 0 ? 20 : 0, // Even indices auto-close quickly; closeDelay 0 disables auto-close for odd indices
          output: i % 2 === 0 ? 'Success' : 'Error output'
        });
      }

      const results: Array<{ id: string; success: boolean }> = [];

      for (let i = 0; i < numParallel; i++) {
        const conversationId = `1702123456789-gemini-${Date.now()}-${i}`;

        try {
          await cliExecutorModule.handler({
            tool: 'gemini',
            prompt: `Test ${i}`,
            cd: TEST_PROJECT_ROOT,
            id: conversationId
          });
          results.push({ id: conversationId, success: true });
        } catch (error) {
          results.push({ id: conversationId, success: false });
        }
      }

      // Verify some succeeded
      assert.ok(results.some(r => r.success), 'At least one execution should succeed');

      // Verify no data corruption in successful executions
      const history = store.getHistory({ limit: 100 });
      const successfulIds = results.filter(r => r.success).map(r => r.id);

      for (const id of successfulIds) {
        const found = history.executions.some((e: any) => e.id === id);
        assert.ok(found, `Successful execution ${id} should be in history`);
      }
    });
  });
  describe('L2: Stress test (10 parallel executions)', () => {
    it('handles 10 parallel executions with zero duplicates', async () => {
      const numParallel = 10;
      const store = new historyStoreModule.CliHistoryStore(TEST_PROJECT_ROOT);

      for (let i = 0; i < numParallel; i++) {
        plannedBehaviors.push({ closeDelay: 25 });
      }

      const executions = await runConcurrentExecutions(
        { numProcesses: numParallel },
        cliExecutorModule.handler.bind(cliExecutorModule)
      );

      // Verify all completed
      assert.equal(executions.length, numParallel);

      // Verify no duplicates
      const history = store.getHistory({ limit: 100 });
      const idCounts = new Map<string, number>();

      for (const exec of history.executions) {
        const id = (exec as any).id;
        if (id.startsWith('1702123456789-gemini')) {
          idCounts.set(id, (idCounts.get(id) || 0) + 1);
        }
      }

      const duplicates = Array.from(idCounts.entries()).filter(([_, count]) => count > 1);
      assert.equal(duplicates.length, 0, 'Stress test: No duplicates should exist');

      // Verify success rate > 95%
      const successCount = executions.filter(e => e.success).length;
      const successRate = (successCount / numParallel) * 100;
      assert.ok(successRate >= 95, `Success rate ${successRate}% should be >= 95%`);
    });
  });
});