Add benchmark results for fast3 and fast4, implement KeepAliveLspBridge, and add tests for staged strategies

- Added new benchmark result files: compare_2026-02-09_score_fast3.json and compare_2026-02-09_score_fast4.json.
- Implemented KeepAliveLspBridge to maintain a persistent LSP connection across multiple queries, improving performance.
- Created unit tests for staged clustering strategies in test_staged_stage3_fast_strategies.py, ensuring correct behavior of score and dir_rr strategies.
This commit is contained in:
catlog22
2026-02-09 20:45:29 +08:00
parent c62d26183b
commit 4344e79e68
64 changed files with 6154 additions and 123 deletions

View File

@@ -69,6 +69,7 @@ export type {
export {
useIssues,
useIssueQueue,
useQueueHistory,
useCreateIssue,
useUpdateIssue,
useDeleteIssue,

View File

@@ -8,6 +8,7 @@ import {
fetchIssues,
fetchIssueHistory,
fetchIssueQueue,
fetchQueueHistory,
createIssue,
updateIssue,
deleteIssue,
@@ -16,12 +17,15 @@ import {
deleteQueue as deleteQueueApi,
mergeQueues as mergeQueuesApi,
splitQueue as splitQueueApi,
reorderQueueGroup as reorderQueueGroupApi,
moveQueueItem as moveQueueItemApi,
fetchDiscoveries,
fetchDiscoveryFindings,
exportDiscoveryFindingsAsIssues,
type Issue,
type IssueQueue,
type IssuesResponse,
type QueueHistoryIndex,
type DiscoverySession,
type Finding,
} from '../lib/api';
@@ -309,14 +313,31 @@ export interface UseQueueMutationsReturn {
deleteQueue: (queueId: string) => Promise<void>;
mergeQueues: (sourceId: string, targetId: string) => Promise<void>;
splitQueue: (sourceQueueId: string, itemIds: string[]) => Promise<void>;
reorderQueueGroup: (groupId: string, newOrder: string[]) => Promise<void>;
moveQueueItem: (itemId: string, toGroupId: string, toIndex?: number) => Promise<void>;
isActivating: boolean;
isDeactivating: boolean;
isDeleting: boolean;
isMerging: boolean;
isSplitting: boolean;
isReordering: boolean;
isMoving: boolean;
isMutating: boolean;
}
/**
 * React Query hook that fetches the queue-history index for the current
 * workspace project.
 *
 * @param options.enabled         - Gate the query externally (default: true).
 *                                  The query additionally requires a non-empty
 *                                  project path before it will fetch.
 * @param options.refetchInterval - Polling interval in milliseconds; any value
 *                                  <= 0 disables polling (default: 0).
 * @returns The TanStack Query result wrapping the `QueueHistoryIndex`.
 */
export function useQueueHistory(options?: { enabled?: boolean; refetchInterval?: number }): UseQueryResult<QueueHistoryIndex> {
  // Destructure with renamed locals; defaults mirror "on, no polling".
  const { enabled: isEnabled = true, refetchInterval: pollMs = 0 } = options ?? {};
  const projectPath = useWorkflowStore(selectProjectPath);

  // Only fetch when both the caller opted in and a project is selected.
  const canFetch = isEnabled && !!projectPath;
  // TanStack Query expects `false` (not 0) to mean "do not poll".
  const pollInterval = pollMs > 0 ? pollMs : false;

  return useQuery({
    queryKey: workspaceQueryKeys.issueQueueHistory(projectPath),
    queryFn: () => fetchQueueHistory(projectPath),
    staleTime: STALE_TIME,
    enabled: canFetch,
    refetchInterval: pollInterval,
    retry: 2,
  });
}
export function useQueueMutations(): UseQueueMutationsReturn {
const queryClient = useQueryClient();
const projectPath = useWorkflowStore(selectProjectPath);
@@ -325,6 +346,7 @@ export function useQueueMutations(): UseQueueMutationsReturn {
mutationFn: (queueId: string) => activateQueue(queueId, projectPath),
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.issueQueue(projectPath) });
queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.issueQueueHistory(projectPath) });
},
});
@@ -332,6 +354,7 @@ export function useQueueMutations(): UseQueueMutationsReturn {
mutationFn: () => deactivateQueue(projectPath),
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.issueQueue(projectPath) });
queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.issueQueueHistory(projectPath) });
},
});
@@ -339,6 +362,7 @@ export function useQueueMutations(): UseQueueMutationsReturn {
mutationFn: (queueId: string) => deleteQueueApi(queueId, projectPath),
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.issueQueue(projectPath) });
queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.issueQueueHistory(projectPath) });
},
});
@@ -347,12 +371,30 @@ export function useQueueMutations(): UseQueueMutationsReturn {
mergeQueuesApi(sourceId, targetId, projectPath),
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.issueQueue(projectPath) });
queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.issueQueueHistory(projectPath) });
},
});
const splitMutation = useMutation({
mutationFn: ({ sourceQueueId, itemIds }: { sourceQueueId: string; itemIds: string[] }) =>
splitQueueApi(sourceQueueId, itemIds, projectPath),
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.issueQueue(projectPath) });
queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.issueQueueHistory(projectPath) });
},
});
const reorderMutation = useMutation({
mutationFn: ({ groupId, newOrder }: { groupId: string; newOrder: string[] }) =>
reorderQueueGroupApi(projectPath, { groupId, newOrder }),
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.issueQueue(projectPath) });
},
});
const moveMutation = useMutation({
mutationFn: ({ itemId, toGroupId, toIndex }: { itemId: string; toGroupId: string; toIndex?: number }) =>
moveQueueItemApi(projectPath, { itemId, toGroupId, toIndex }),
onSuccess: () => {
queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.issueQueue(projectPath) });
},
@@ -364,12 +406,23 @@ export function useQueueMutations(): UseQueueMutationsReturn {
deleteQueue: deleteMutation.mutateAsync,
mergeQueues: (sourceId, targetId) => mergeMutation.mutateAsync({ sourceId, targetId }),
splitQueue: (sourceQueueId, itemIds) => splitMutation.mutateAsync({ sourceQueueId, itemIds }),
reorderQueueGroup: (groupId, newOrder) => reorderMutation.mutateAsync({ groupId, newOrder }).then(() => {}),
moveQueueItem: (itemId, toGroupId, toIndex) => moveMutation.mutateAsync({ itemId, toGroupId, toIndex }).then(() => {}),
isActivating: activateMutation.isPending,
isDeactivating: deactivateMutation.isPending,
isDeleting: deleteMutation.isPending,
isMerging: mergeMutation.isPending,
isSplitting: splitMutation.isPending,
isMutating: activateMutation.isPending || deactivateMutation.isPending || deleteMutation.isPending || mergeMutation.isPending || splitMutation.isPending,
isReordering: reorderMutation.isPending,
isMoving: moveMutation.isPending,
isMutating:
activateMutation.isPending ||
deactivateMutation.isPending ||
deleteMutation.isPending ||
mergeMutation.isPending ||
splitMutation.isPending ||
reorderMutation.isPending ||
moveMutation.isPending,
};
}

View File

@@ -8,6 +8,7 @@ import { useNotificationStore } from '@/stores';
import { useExecutionStore } from '@/stores/executionStore';
import { useFlowStore } from '@/stores';
import { useCliStreamStore } from '@/stores/cliStreamStore';
import { useCliSessionStore } from '@/stores/cliSessionStore';
import {
OrchestratorMessageSchema,
type OrchestratorWebSocketMessage,
@@ -28,6 +29,7 @@ function getStoreState() {
const execution = useExecutionStore.getState();
const flow = useFlowStore.getState();
const cliStream = useCliStreamStore.getState();
const cliSessions = useCliSessionStore.getState();
return {
// Notification store
setWsStatus: notification.setWsStatus,
@@ -56,6 +58,11 @@ function getStoreState() {
updateNode: flow.updateNode,
// CLI stream store
addOutput: cliStream.addOutput,
// CLI session store (PTY-backed terminal)
upsertCliSession: cliSessions.upsertSession,
removeCliSession: cliSessions.removeSession,
appendCliSessionOutput: cliSessions.appendOutput,
};
}
@@ -163,6 +170,31 @@ export function useWebSocket(options: UseWebSocketOptions = {}): UseWebSocketRet
break;
}
// ========== PTY CLI Sessions ==========
case 'CLI_SESSION_CREATED': {
const session = data.payload?.session;
if (session?.sessionKey) {
stores.upsertCliSession(session);
}
break;
}
case 'CLI_SESSION_OUTPUT': {
const { sessionKey, data: chunk } = data.payload ?? {};
if (typeof sessionKey === 'string' && typeof chunk === 'string') {
stores.appendCliSessionOutput(sessionKey, chunk);
}
break;
}
case 'CLI_SESSION_CLOSED': {
const { sessionKey } = data.payload ?? {};
if (typeof sessionKey === 'string') {
stores.removeCliSession(sessionKey);
}
break;
}
case 'CLI_OUTPUT': {
const { executionId, chunkType, data: outputData, unit } = data.payload;