mirror of
https://github.com/catlog22/Claude-Code-Workflow.git
synced 2026-02-11 02:33:51 +08:00
feat(a2ui): Implement A2UI backend with question handling and WebSocket support
- Added A2UITypes for defining question structures and answers.
- Created A2UIWebSocketHandler for managing WebSocket connections and message handling.
- Developed ask-question tool for interactive user questions via A2UI.
- Introduced platformUtils for platform detection and shell command handling.
- Centralized TypeScript types in index.ts for better organization.
- Implemented compatibility checks for hook templates based on platform requirements.
@@ -15,6 +15,11 @@ export type { UseConfigReturn } from './useConfig';
export { useNotifications } from './useNotifications';
export type { UseNotificationsReturn, ToastOptions } from './useNotifications';

export { useWebSocketNotifications } from './useWebSocketNotifications';

export { useSystemNotifications } from './useSystemNotifications';
export type { UseSystemNotificationsReturn, SystemNotificationOptions } from './useSystemNotifications';

export { useDashboardStats, usePrefetchDashboardStats, dashboardStatsKeys } from './useDashboardStats';
export type { UseDashboardStatsOptions, UseDashboardStatsReturn } from './useDashboardStats';

@@ -154,6 +159,8 @@ export {
  hooksKeys,
  useRules,
  useToggleRule,
  useCreateRule,
  useDeleteRule,
  rulesKeys,
} from './useCli';
export type {
@@ -176,3 +183,11 @@ export type {
  UseCliExecutionOptions,
  UseCliExecutionReturn,
} from './useCliExecution';

// ========== Workspace Query Keys ==========
export {
  useWorkspaceQueryKeys,
} from './useWorkspaceQueryKeys';
export type {
  WorkspaceQueryKeys,
} from './useWorkspaceQueryKeys';
ccw/frontend/src/hooks/useActiveCliExecutions.ts (new file, 202 lines)
@@ -0,0 +1,202 @@
// ========================================
// useActiveCliExecutions Hook
// ========================================
// Hook for syncing active CLI executions from server

import { useQuery, useQueryClient } from '@tanstack/react-query';
import { useCliStreamStore } from '@/stores/cliStreamStore';

/**
 * Response type from /api/cli/active endpoint
 */
interface ActiveCliExecution {
  id: string;
  tool: string;
  mode: string;
  status: 'running' | 'completed' | 'error';
  output?: string;
  startTime: number;
  isComplete?: boolean;
}

interface ActiveCliExecutionsResponse {
  executions: ActiveCliExecution[];
}

/**
 * Maximum number of output lines to sync per execution
 */
const MAX_OUTPUT_LINES = 5000;

/**
 * Parse message type from content for proper formatting
 * Maps Chinese prefixes to output types
 */
function parseMessageType(content: string): { type: 'stdout' | 'stderr' | 'metadata' | 'thought' | 'system' | 'tool_call'; hasPrefix: boolean } {
  const patterns = {
    system: /^\[系统\]/,
    thought: /^\[思考\]/,
    response: /^\[响应\]/,
    result: /^\[结果\]/,
    error: /^\[错误\]/,
    warning: /^\[警告\]/,
    info: /^\[信息\]/
  };

  for (const [type, pattern] of Object.entries(patterns)) {
    if (pattern.test(content)) {
      const typeMap: Record<string, 'stdout' | 'stderr' | 'metadata' | 'thought' | 'system' | 'tool_call'> = {
        system: 'system',
        thought: 'thought',
        response: 'stdout',
        result: 'metadata',
        error: 'stderr',
        warning: 'stderr',
        info: 'metadata'
      };
      return { type: typeMap[type] || 'stdout', hasPrefix: true };
    }
  }
  return { type: 'stdout', hasPrefix: false };
}

/**
 * Parse historical output from server response
 */
function parseHistoricalOutput(rawOutput: string, startTime: number) {
  if (!rawOutput) return [];

  const lines = rawOutput.split('\n');
  const startIndex = Math.max(0, lines.length - MAX_OUTPUT_LINES + 1);
  const historicalLines: Array<{ type: 'stdout' | 'stderr' | 'metadata' | 'thought' | 'system' | 'tool_call'; content: string; timestamp: number }> = [];

  lines.slice(startIndex).forEach(line => {
    if (line.trim()) {
      const { type } = parseMessageType(line);
      historicalLines.push({
        type,
        content: line,
        timestamp: startTime || Date.now()
      });
    }
  });

  return historicalLines;
}

/**
 * Query key for active CLI executions
 */
export const ACTIVE_CLI_EXECUTIONS_QUERY_KEY = ['cliActive'];

/**
 * Hook to sync active CLI executions from server
 *
 * @param enabled - Whether the query should be enabled
 * @param refetchInterval - Refetch interval in milliseconds (default: 5000)
 *
 * @example
 * ```tsx
 * const { data: executions, isLoading } = useActiveCliExecutions(true);
 * ```
 */
export function useActiveCliExecutions(
  enabled: boolean,
  refetchInterval: number = 5000
) {
  const upsertExecution = useCliStreamStore(state => state.upsertExecution);
  const executions = useCliStreamStore(state => state.executions);
  const setCurrentExecution = useCliStreamStore(state => state.setCurrentExecution);

  return useQuery({
    queryKey: ACTIVE_CLI_EXECUTIONS_QUERY_KEY,
    queryFn: async () => {
      const response = await fetch('/api/cli/active');
      if (!response.ok) {
        throw new Error(`Failed to fetch active executions: ${response.statusText}`);
      }
      const data: ActiveCliExecutionsResponse = await response.json();

      // Process executions and sync to store
      let hasNewExecution = false;
      const now = Date.now();

      for (const exec of data.executions) {
        const existing = executions[exec.id];
        const historicalOutput = parseHistoricalOutput(exec.output || '', exec.startTime);

        if (!existing) {
          hasNewExecution = true;
        }

        // Merge existing output with historical output
        const existingOutput = existing?.output || [];
        const existingContentSet = new Set(existingOutput.map(o => o.content));
        const missingLines = historicalOutput.filter(h => !existingContentSet.has(h.content));

        // Prepend missing historical lines before existing output
        // Skip system start message when prepending
        const systemMsgIndex = existingOutput.findIndex(o => o.type === 'system');
        const insertIndex = systemMsgIndex >= 0 ? systemMsgIndex + 1 : 0;

        const mergedOutput = [...existingOutput];
        if (missingLines.length > 0) {
          mergedOutput.splice(insertIndex, 0, ...missingLines);
        }

        // Trim if too long
        if (mergedOutput.length > MAX_OUTPUT_LINES) {
          mergedOutput.splice(0, mergedOutput.length - MAX_OUTPUT_LINES);
        }

        // Add system message for new executions
        let finalOutput = mergedOutput;
        if (!existing) {
          finalOutput = [
            {
              type: 'system',
              content: `[${new Date(exec.startTime).toLocaleTimeString()}] CLI execution started: ${exec.tool} (${exec.mode} mode)`,
              timestamp: exec.startTime
            },
            ...mergedOutput
          ];
        }

        upsertExecution(exec.id, {
          tool: exec.tool || 'cli',
          mode: exec.mode || 'analysis',
          status: exec.status || 'running',
          output: finalOutput,
          startTime: exec.startTime || Date.now(),
          endTime: exec.status !== 'running' ? now : undefined,
          recovered: !existing
        });
      }

      // Set current execution to first running execution if none selected
      if (hasNewExecution) {
        const runningExec = data.executions.find(e => e.status === 'running');
        if (runningExec && !executions[runningExec.id]) {
          setCurrentExecution(runningExec.id);
        }
      }

      return data.executions;
    },
    enabled,
    refetchInterval,
    staleTime: 2000, // Consider data fresh for 2 seconds
  });
}

/**
 * Hook to invalidate active CLI executions query
 * Use this to trigger a refetch after an execution event
 */
export function useInvalidateActiveCliExecutions() {
  const queryClient = useQueryClient();

  return () => {
    queryClient.invalidateQueries({ queryKey: ACTIVE_CLI_EXECUTIONS_QUERY_KEY });
  };
}
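A minimal usage sketch for the two hooks above (illustrative only, not part of this commit; the component, its props, and the `@/hooks/...` import path are assumptions):

// Sketch: poll active executions while a panel is open and expose a manual refresh.
import { useActiveCliExecutions, useInvalidateActiveCliExecutions } from '@/hooks/useActiveCliExecutions';

export function CliActivityPanel({ isOpen }: { isOpen: boolean }) {
  // Query runs only while the panel is visible; refetches every 5s by default.
  const { data: executions, isLoading } = useActiveCliExecutions(isOpen);
  const refresh = useInvalidateActiveCliExecutions();

  if (isLoading) return <p>Loading...</p>;
  return (
    <div>
      <button onClick={refresh}>Refresh</button>
      <ul>
        {(executions ?? []).map((exec) => (
          <li key={exec.id}>{exec.tool}: {exec.status}</li>
        ))}
      </ul>
    </div>
  );
}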
@@ -349,8 +349,11 @@ export function useToggleHook() {
import {
  fetchRules,
  toggleRule,
  createRule as createRuleApi,
  deleteRule as deleteRuleApi,
  type Rule,
  type RulesResponse,
  type RuleCreateInput,
} from '../lib/api';

export const rulesKeys = {
@@ -446,3 +449,64 @@ export function useToggleRule() {
    error: mutation.error,
  };
}

export function useCreateRule() {
  const queryClient = useQueryClient();

  const mutation = useMutation({
    mutationFn: (input: RuleCreateInput) => createRuleApi(input),
    onSuccess: (newRule) => {
      queryClient.setQueryData<RulesResponse>(rulesKeys.lists(), (old) => {
        if (!old) return { rules: [newRule] };
        return {
          rules: [newRule, ...old.rules],
        };
      });
    },
    onSettled: () => {
      queryClient.invalidateQueries({ queryKey: rulesKeys.all });
    },
  });

  return {
    createRule: mutation.mutateAsync,
    isCreating: mutation.isPending,
    error: mutation.error,
  };
}

export function useDeleteRule() {
  const queryClient = useQueryClient();

  const mutation = useMutation({
    mutationFn: ({ ruleId, location }: { ruleId: string; location?: string }) =>
      deleteRuleApi(ruleId, location),
    onMutate: async ({ ruleId }) => {
      await queryClient.cancelQueries({ queryKey: rulesKeys.all });
      const previousRules = queryClient.getQueryData<RulesResponse>(rulesKeys.lists());

      queryClient.setQueryData<RulesResponse>(rulesKeys.lists(), (old) => {
        if (!old) return old;
        return {
          rules: old.rules.filter((r) => r.id !== ruleId),
        };
      });

      return { previousRules };
    },
    onError: (_error, _vars, context) => {
      if (context?.previousRules) {
        queryClient.setQueryData(rulesKeys.lists(), context.previousRules);
      }
    },
    onSettled: () => {
      queryClient.invalidateQueries({ queryKey: rulesKeys.all });
    },
  });

  return {
    deleteRule: (ruleId: string, location?: string) => mutation.mutateAsync({ ruleId, location }),
    isDeleting: mutation.isPending,
    error: mutation.error,
  };
}
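A minimal sketch of wiring the new rule mutations into a component (illustrative, not from the commit; the component shape and the `@/hooks/...` and `@/lib/api` import paths are assumptions):

// Sketch: create and delete rules from a small toolbar.
import { useCreateRule, useDeleteRule } from '@/hooks/useCli';
import type { RuleCreateInput } from '@/lib/api';

export function RuleActions({ ruleId, draft }: { ruleId: string; draft: RuleCreateInput }) {
  const { createRule, isCreating } = useCreateRule();
  const { deleteRule, isDeleting } = useDeleteRule();

  return (
    <div>
      <button disabled={isCreating} onClick={() => createRule(draft)}>Add rule</button>
      <button disabled={isDeleting} onClick={() => deleteRule(ruleId)}>Delete</button>
    </div>
  );
}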
@@ -5,6 +5,8 @@

import { useQuery, useQueryClient } from '@tanstack/react-query';
import { fetchDashboardStats, type DashboardStats } from '../lib/api';
import { useWorkflowStore, selectProjectPath } from '@/stores/workflowStore';
import { workspaceQueryKeys } from '@/lib/queryKeys';

// Query key factory
export const dashboardStatsKeys = {
@@ -64,12 +66,16 @@ export function useDashboardStats(
): UseDashboardStatsReturn {
  const { staleTime = STALE_TIME, enabled = true, refetchInterval = 0 } = options;
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);

  // Only enable query when projectPath is available
  const queryEnabled = enabled && !!projectPath;

  const query = useQuery({
    queryKey: dashboardStatsKeys.detail(),
    queryKey: workspaceQueryKeys.projectOverview(projectPath),
    queryFn: fetchDashboardStats,
    staleTime,
    enabled,
    enabled: queryEnabled,
    refetchInterval: refetchInterval > 0 ? refetchInterval : false,
    retry: 2,
    retryDelay: (attemptIndex) => Math.min(1000 * 2 ** attemptIndex, 10000),
@@ -80,7 +86,9 @@ export function useDashboardStats(
  };

  const invalidate = async () => {
    await queryClient.invalidateQueries({ queryKey: dashboardStatsKeys.all });
    if (projectPath) {
      await queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.all(projectPath) });
    }
  };

  return {
@@ -100,12 +108,15 @@ export function useDashboardStats(
 */
export function usePrefetchDashboardStats() {
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);

  return () => {
    queryClient.prefetchQuery({
      queryKey: dashboardStatsKeys.detail(),
      queryFn: fetchDashboardStats,
      staleTime: STALE_TIME,
    });
    if (projectPath) {
      queryClient.prefetchQuery({
        queryKey: workspaceQueryKeys.projectOverview(projectPath),
        queryFn: fetchDashboardStats,
        staleTime: STALE_TIME,
      });
    }
  };
}
ccw/frontend/src/hooks/useFileExplorer.ts (new file, 450 lines)
@@ -0,0 +1,450 @@
// ========================================
// useFileExplorer Hook
// ========================================
// TanStack Query hooks for File Explorer with WebSocket subscription

import { useQuery, useQueryClient } from '@tanstack/react-query';
import { useState, useCallback, useEffect, useRef } from 'react';
import {
  fetchFileTree,
  fetchFileContent,
  fetchRootDirectories,
  searchFiles,
  type RootDirectory,
  type SearchFilesResponse,
} from '../lib/api';
import type { FileSystemNode, FileContent, ExplorerState } from '../types/file-explorer';

// Query key factory
export const fileExplorerKeys = {
  all: ['fileExplorer'] as const,
  trees: () => [...fileExplorerKeys.all, 'tree'] as const,
  tree: (rootPath: string) => [...fileExplorerKeys.trees(), rootPath] as const,
  contents: () => [...fileExplorerKeys.all, 'content'] as const,
  content: (path: string) => [...fileExplorerKeys.contents(), path] as const,
  roots: () => [...fileExplorerKeys.all, 'roots'] as const,
  search: (query: string) => [...fileExplorerKeys.all, 'search', query] as const,
};

// Default stale time: 5 minutes for file tree (stable structure)
const TREE_STALE_TIME = 5 * 60 * 1000;
// Default stale time: 10 minutes for file content
const CONTENT_STALE_TIME = 10 * 60 * 1000;

export interface UseFileExplorerOptions {
  /** Root directory path (default: '/') */
  rootPath?: string;
  /** Maximum tree depth (0 = unlimited) */
  maxDepth?: number;
  /** Include hidden files */
  includeHidden?: boolean;
  /** File patterns to exclude (glob patterns) */
  excludePatterns?: string[];
  /** Override default stale time (ms) */
  staleTime?: number;
  /** Enable/disable the query */
  enabled?: boolean;
}

export interface UseFileExplorerReturn {
  /** Current explorer state */
  state: ExplorerState;
  /** Root nodes of the file tree */
  rootNodes: FileSystemNode[];
  /** Loading state for initial fetch */
  isLoading: boolean;
  /** Fetching state (initial or refetch) */
  isFetching: boolean;
  /** Error object if query failed */
  error: Error | null;
  /** Manually refetch file tree */
  refetch: () => Promise<void>;
  /** Set the selected file path */
  setSelectedFile: (path: string | null) => void;
  /** Toggle directory expanded state */
  toggleExpanded: (path: string) => void;
  /** Expand a directory */
  expandDirectory: (path: string) => void;
  /** Collapse a directory */
  collapseDirectory: (path: string) => void;
  /** Expand all directories */
  expandAll: () => void;
  /** Collapse all directories */
  collapseAll: () => void;
  /** Set view mode */
  setViewMode: (mode: ExplorerState['viewMode']) => void;
  /** Set sort order */
  setSortOrder: (order: ExplorerState['sortOrder']) => void;
  /** Toggle hidden files visibility */
  toggleShowHidden: () => void;
  /** Set filter string */
  setFilter: (filter: string) => void;
  /** Load file content */
  loadFileContent: (path: string) => Promise<FileContent | undefined>;
  /** Available root directories */
  rootDirectories: RootDirectory[] | undefined;
  /** Root directories loading state */
  isLoadingRoots: boolean;
  /** Search files */
  searchFiles: (query: string) => Promise<SearchFilesResponse | undefined>;
  /** Search results */
  searchResults: SearchFilesResponse | undefined;
  /** Is searching */
  isSearching: boolean;
  /** Clear file content cache */
  clearFileCache: (path?: string) => void;
}

/**
 * Hook for File Explorer with WebSocket subscription for real-time updates
 *
 * @example
 * ```tsx
 * const { rootNodes, state, setSelectedFile, toggleExpanded } = useFileExplorer({
 *   rootPath: '/src'
 * });
 * ```
 */
export function useFileExplorer(options: UseFileExplorerOptions = {}): UseFileExplorerReturn {
  const {
    rootPath = '/',
    maxDepth = 5,
    includeHidden = false,
    excludePatterns,
    staleTime,
    enabled = true,
  } = options;

  const queryClient = useQueryClient();

  // Explorer state
  const [expandedPaths, setExpandedPaths] = useState<Set<string>>(new Set([rootPath]));
  const [selectedFile, setSelectedFileState] = useState<string | null>(null);
  const [viewMode, setViewModeState] = useState<ExplorerState['viewMode']>('tree');
  const [sortOrder, setSortOrderState] = useState<ExplorerState['sortOrder']>('name');
  const [showHiddenFiles, setShowHiddenFiles] = useState(false);
  const [filter, setFilterState] = useState('');
  const [searchResults, setSearchResults] = useState<SearchFilesResponse | undefined>();

  // Fetch file tree
  const treeQuery = useQuery({
    queryKey: fileExplorerKeys.tree(rootPath),
    queryFn: () => fetchFileTree(rootPath, { maxDepth, includeHidden, excludePatterns }),
    staleTime: staleTime ?? TREE_STALE_TIME,
    enabled,
    retry: 2,
    retryDelay: (attemptIndex) => Math.min(1000 * 2 ** attemptIndex, 10000),
  });

  // Fetch root directories
  const rootsQuery = useQuery({
    queryKey: fileExplorerKeys.roots(),
    queryFn: fetchRootDirectories,
    staleTime: TREE_STALE_TIME,
    enabled,
    retry: 1,
  });

  const rootNodes = treeQuery.data?.rootNodes ?? [];
  const rootDirectories = rootsQuery.data;

  // Toggle expanded state
  const toggleExpanded = useCallback((path: string) => {
    setExpandedPaths((prev) => {
      const next = new Set(prev);
      if (next.has(path)) {
        next.delete(path);
      } else {
        next.add(path);
      }
      return next;
    });
  }, []);

  // Expand directory
  const expandDirectory = useCallback((path: string) => {
    setExpandedPaths((prev) => new Set([...prev, path]));
  }, []);

  // Collapse directory
  const collapseDirectory = useCallback((path: string) => {
    setExpandedPaths((prev) => {
      const next = new Set(prev);
      next.delete(path);
      return next;
    });
  }, []);

  // Expand all directories
  const expandAll = useCallback(() => {
    const allPaths = new Set<string>();
    const collectPaths = (nodes: FileSystemNode[]) => {
      for (const node of nodes) {
        if (node.type === 'directory') {
          allPaths.add(node.path);
          if (node.children) {
            collectPaths(node.children);
          }
        }
      }
    };
    collectPaths(rootNodes);
    setExpandedPaths(allPaths);
  }, [rootNodes]);

  // Collapse all directories
  const collapseAll = useCallback(() => {
    setExpandedPaths(new Set([rootPath]));
  }, [rootPath]);

  // Set selected file
  const setSelectedFile = useCallback((path: string | null) => {
    setSelectedFileState(path);
    // Add to query cache for quick access
    if (path) {
      queryClient.prefetchQuery({
        queryKey: fileExplorerKeys.content(path),
        queryFn: () => fetchFileContent(path),
        staleTime: CONTENT_STALE_TIME,
      });
    }
  }, [queryClient]);

  // Set view mode
  const setViewMode = useCallback((mode: ExplorerState['viewMode']) => {
    setViewModeState(mode);
  }, []);

  // Set sort order
  const setSortOrder = useCallback((order: ExplorerState['sortOrder']) => {
    setSortOrderState(order);
  }, []);

  // Toggle hidden files
  const toggleShowHidden = useCallback(() => {
    setShowHiddenFiles((prev) => !prev);
  }, []);

  // Set filter
  const setFilter = useCallback((value: string) => {
    setFilterState(value);
  }, []);

  // Load file content
  const loadFileContent = useCallback(async (path: string) => {
    try {
      const content = await queryClient.fetchQuery({
        queryKey: fileExplorerKeys.content(path),
        queryFn: () => fetchFileContent(path),
        staleTime: CONTENT_STALE_TIME,
      });
      return content;
    } catch (error) {
      console.error(`[useFileExplorer] Failed to load file content: ${path}`, error);
      throw error;
    }
  }, [queryClient]);

  // Search files
  const searchFilesHandler = useCallback(async (query: string) => {
    if (!query.trim()) {
      setSearchResults(undefined);
      return undefined;
    }
    try {
      const results = await queryClient.fetchQuery({
        queryKey: fileExplorerKeys.search(query),
        queryFn: () => searchFiles({ rootPath, query, maxResults: 100 }),
        staleTime: 60000, // 1 minute
      });
      setSearchResults(results);
      return results;
    } catch (error) {
      console.error('[useFileExplorer] Search failed:', error);
      throw error;
    }
  }, [queryClient, rootPath]);

  const isSearching = queryClient.isFetching({ queryKey: fileExplorerKeys.all }) > 0;

  // Clear file cache
  const clearFileCache = useCallback((path?: string) => {
    if (path) {
      queryClient.removeQueries({ queryKey: fileExplorerKeys.content(path) });
    } else {
      queryClient.removeQueries({ queryKey: fileExplorerKeys.contents() });
    }
  }, [queryClient]);

  // Refetch
  const refetch = async () => {
    await treeQuery.refetch();
  };

  // Build explorer state object
  const state: ExplorerState = {
    currentPath: rootPath,
    selectedFile,
    expandedPaths,
    fileTree: rootNodes,
    viewMode,
    sortOrder,
    showHiddenFiles,
    filter,
    isLoading: treeQuery.isLoading,
    error: treeQuery.error?.message ?? null,
    fileContents: {},
    recentFiles: [],
    maxRecentFiles: 10,
    directoriesFirst: true,
  };

  return {
    state,
    rootNodes,
    isLoading: treeQuery.isLoading,
    isFetching: treeQuery.isFetching,
    error: treeQuery.error,
    refetch,
    setSelectedFile,
    toggleExpanded,
    expandDirectory,
    collapseDirectory,
    expandAll,
    collapseAll,
    setViewMode,
    setSortOrder,
    toggleShowHidden,
    setFilter,
    loadFileContent,
    rootDirectories,
    isLoadingRoots: rootsQuery.isLoading,
    searchFiles: searchFilesHandler,
    searchResults,
    isSearching,
    clearFileCache,
  };
}

/**
 * Hook for file content with caching
 */
export function useFileContent(filePath: string | null, options: {
  enabled?: boolean;
  staleTime?: number;
} = {}) {
  const { enabled = true, staleTime = CONTENT_STALE_TIME } = options;

  const query = useQuery({
    queryKey: fileExplorerKeys.content(filePath ?? ''),
    queryFn: () => fetchFileContent(filePath ?? ''),
    staleTime,
    enabled: enabled && !!filePath,
    retry: 1,
  });

  return {
    content: query.data,
    isLoading: query.isLoading,
    error: query.error,
    refetch: () => query.refetch(),
  };
}

/**
 * WebSocket hook for real-time file updates
 *
 * @example
 * ```tsx
 * const { isConnected } = useFileExplorerWebSocket({
 *   onFileChanged: (path) => {
 *     console.log('File changed:', path);
 *     refetch();
 *   }
 * });
 * ```
 */
export interface UseFileExplorerWebSocketOptions {
  /** Enable WebSocket connection */
  enabled?: boolean;
  /** Callback when file changes */
  onFileChanged?: (path: string) => void;
  /** Callback when directory changes */
  onDirectoryChanged?: (path: string) => void;
}

export interface UseFileExplorerWebSocketReturn {
  /** WebSocket connection status */
  isConnected: boolean;
}

export function useFileExplorerWebSocket(
  options: UseFileExplorerWebSocketOptions = {}
): UseFileExplorerWebSocketReturn {
  const { enabled = true, onFileChanged, onDirectoryChanged } = options;
  const wsRef = useRef<WebSocket | null>(null);
  const [isConnected, setIsConnected] = useState(false);

  useEffect(() => {
    if (!enabled) return;

    // Construct WebSocket URL
    const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
    const wsUrl = `${protocol}//${window.location.host}/ws`;

    try {
      const ws = new WebSocket(wsUrl);
      wsRef.current = ws;

      ws.onopen = () => {
        console.log('[FileExplorerWS] Connected');
        setIsConnected(true);
      };

      ws.onmessage = (event) => {
        try {
          const data = JSON.parse(event.data);

          // Handle file system change events
          if (data.type === 'FILE_CHANGED') {
            const { path } = data.payload || {};
            if (path) {
              onFileChanged?.(path);
            }
          } else if (data.type === 'DIRECTORY_CHANGED') {
            const { path } = data.payload || {};
            if (path) {
              onDirectoryChanged?.(path);
            }
          }
        } catch (error) {
          console.error('[FileExplorerWS] Failed to parse message:', error);
        }
      };

      ws.onclose = () => {
        console.log('[FileExplorerWS] Disconnected');
        setIsConnected(false);
        wsRef.current = null;
      };

      ws.onerror = (error) => {
        console.error('[FileExplorerWS] Error:', error);
        setIsConnected(false);
      };
    } catch (error) {
      console.error('[FileExplorerWS] Failed to connect:', error);
    }

    return () => {
      if (wsRef.current) {
        wsRef.current.close();
        wsRef.current = null;
      }
    };
  }, [enabled, onFileChanged, onDirectoryChanged]);

  return { isConnected };
}

export default useFileExplorer;
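A minimal sketch pairing useFileExplorerWebSocket with cache invalidation (illustrative, not part of the commit; the component and the `@/hooks/...` import path are assumptions). Memoizing the callback matters here because the hook's effect lists the callbacks as dependencies, so a new function identity on every render would tear down and reopen the socket:

// Sketch: refresh the cached file tree whenever the server reports a change.
import { useCallback } from 'react';
import { useQueryClient } from '@tanstack/react-query';
import { useFileExplorerWebSocket, fileExplorerKeys } from '@/hooks/useFileExplorer';

export function ExplorerLiveSync({ rootPath }: { rootPath: string }) {
  const queryClient = useQueryClient();

  // Stable callback so the WebSocket effect does not reconnect on each render.
  const invalidateTree = useCallback(() => {
    queryClient.invalidateQueries({ queryKey: fileExplorerKeys.tree(rootPath) });
  }, [queryClient, rootPath]);

  const { isConnected } = useFileExplorerWebSocket({
    onFileChanged: invalidateTree,
    onDirectoryChanged: invalidateTree,
  });

  return <span>{isConnected ? 'Live updates on' : 'Offline'}</span>;
}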
ccw/frontend/src/hooks/useGraphData.ts (new file, 308 lines)
@@ -0,0 +1,308 @@
// ========================================
// useGraphData Hook
// ========================================
// TanStack Query hooks for Graph Explorer with data transformation

import { useQuery, useQueryClient } from '@tanstack/react-query';
import {
  fetchGraphDependencies,
  fetchGraphImpact,
  type GraphDependenciesRequest,
  type GraphDependenciesResponse,
  type GraphImpactRequest,
  type GraphImpactResponse,
} from '../lib/api';
import type {
  GraphData,
  GraphNode,
  GraphEdge,
  GraphFilters,
  GraphMetadata,
  NodeType,
  EdgeType,
} from '../types/graph-explorer';

// Query key factory
export const graphKeys = {
  all: ['graph'] as const,
  dependencies: () => [...graphKeys.all, 'dependencies'] as const,
  dependency: (request: GraphDependenciesRequest) => [...graphKeys.dependencies(), request] as const,
  impact: (nodeId: string) => [...graphKeys.all, 'impact', nodeId] as const,
};

// Default stale time: 5 minutes (graph data doesn't change frequently)
const STALE_TIME = 5 * 60 * 1000;

export interface UseGraphDataOptions {
  /** Override default stale time (ms) */
  staleTime?: number;
  /** Enable/disable the query */
  enabled?: boolean;
  /** Root path for analysis */
  rootPath?: string;
  /** Maximum depth for traversal */
  maxDepth?: number;
  /** Filter by node types */
  nodeTypes?: NodeType[];
  /** Filter by edge types */
  edgeTypes?: EdgeType[];
}

export interface UseGraphDataReturn {
  /** Graph data with nodes and edges */
  graphData: GraphData | undefined;
  /** Loading state for initial fetch */
  isLoading: boolean;
  /** Fetching state (initial or refetch) */
  isFetching: boolean;
  /** Error object if query failed */
  error: Error | null;
  /** Manually refetch data */
  refetch: () => Promise<void>;
  /** Invalidate and refetch graph data */
  invalidate: () => Promise<void>;
  /** Apply filters to graph data */
  applyFilters: (filters: GraphFilters) => GraphData | undefined;
}

/**
 * Transform API response to GraphData format
 */
function transformToGraphData(response: GraphDependenciesResponse): GraphData {
  return {
    nodes: response.nodes,
    edges: response.edges,
    metadata: response.metadata,
  };
}

/**
 * Apply filters to graph data
 */
function filterGraphData(
  graphData: GraphData | undefined,
  filters: GraphFilters
): GraphData | undefined {
  if (!graphData) return undefined;

  let filteredNodes = [...graphData.nodes];
  let filteredEdges = [...graphData.edges];

  // Filter by node types
  if (filters.nodeTypes && filters.nodeTypes.length > 0) {
    const nodeTypeSet = new Set(filters.nodeTypes);
    filteredNodes = filteredNodes.filter(node => node.type && nodeTypeSet.has(node.type));
  }

  // Filter by edge types
  if (filters.edgeTypes && filters.edgeTypes.length > 0) {
    const edgeTypeSet = new Set(filters.edgeTypes);
    filteredEdges = filteredEdges.filter(edge => edge.data?.edgeType && edgeTypeSet.has(edge.data.edgeType));
  }

  // Filter by search query
  if (filters.searchQuery) {
    const query = filters.searchQuery.toLowerCase();
    filteredNodes = filteredNodes.filter(node =>
      node.data.label.toLowerCase().includes(query) ||
      node.data.filePath?.toLowerCase().includes(query)
    );
  }

  // Filter by file path pattern
  if (filters.filePathPattern) {
    const pattern = new RegExp(filters.filePathPattern, 'i');
    filteredNodes = filteredNodes.filter(node =>
      node.data.filePath?.match(pattern)
    );
  }

  // Filter by categories
  if (filters.categories && filters.categories.length > 0) {
    const categorySet = new Set(filters.categories);
    filteredNodes = filteredNodes.filter(node =>
      node.data.category && categorySet.has(node.data.category)
    );
  }

  // Filter only nodes with issues
  if (filters.showOnlyIssues) {
    filteredNodes = filteredNodes.filter(node => node.data.hasIssues);
  }

  // Filter by minimum complexity
  if (filters.minComplexity !== undefined) {
    filteredNodes = filteredNodes.filter(node => {
      // This would require complexity data to be available
      // For now, we'll skip this filter
      return true;
    });
  }

  // Filter by tags
  if (filters.tags && filters.tags.length > 0) {
    const tagSet = new Set(filters.tags);
    filteredNodes = filteredNodes.filter(node =>
      node.data.tags?.some(tag => tagSet.has(tag))
    );
  }

  // Exclude tags
  if (filters.excludeTags && filters.excludeTags.length > 0) {
    const excludeTagSet = new Set(filters.excludeTags);
    filteredNodes = filteredNodes.filter(node =>
      !node.data.tags?.some(tag => excludeTagSet.has(tag))
    );
  }

  // Show/hide isolated nodes
  if (!filters.showIsolatedNodes) {
    const connectedNodeIds = new Set<string>();
    filteredEdges.forEach(edge => {
      connectedNodeIds.add(edge.source);
      connectedNodeIds.add(edge.target);
    });
    filteredNodes = filteredNodes.filter(node => connectedNodeIds.has(node.id));
  }

  // Build set of visible node IDs
  const visibleNodeIds = new Set(filteredNodes.map(node => node.id));

  // Filter edges to only include edges between visible nodes
  filteredEdges = filteredEdges.filter(edge =>
    visibleNodeIds.has(edge.source) && visibleNodeIds.has(edge.target)
  );

  // Apply max depth filter (focus on specific node)
  if (filters.focusNodeId) {
    const focusNode = filteredNodes.find(n => n.id === filters.focusNodeId);
    if (focusNode) {
      // Collect nodes within max depth
      const nodesWithinDepth = new Set<string>([filters.focusNodeId]);
      const visited = new Set<string>();

      const traverse = (nodeId: string, depth: number) => {
        if (depth > (filters.maxDepth || 3)) return;
        if (visited.has(nodeId)) return;
        visited.add(nodeId);

        filteredEdges.forEach(edge => {
          if (edge.source === nodeId && !nodesWithinDepth.has(edge.target)) {
            nodesWithinDepth.add(edge.target);
            traverse(edge.target, depth + 1);
          }
          if (edge.target === nodeId && !nodesWithinDepth.has(edge.source)) {
            nodesWithinDepth.add(edge.source);
            traverse(edge.source, depth + 1);
          }
        });
      };

      traverse(filters.focusNodeId, 0);

      filteredNodes = filteredNodes.filter(node => nodesWithinDepth.has(node.id));
      const depthNodeIds = new Set(nodesWithinDepth);
      filteredEdges = filteredEdges.filter(edge =>
        depthNodeIds.has(edge.source) && depthNodeIds.has(edge.target)
      );
    }
  }

  return {
    nodes: filteredNodes,
    edges: filteredEdges,
    metadata: graphData.metadata,
  };
}

/**
 * Hook for fetching and filtering graph data
 *
 * @example
 * ```tsx
 * const { graphData, isLoading, applyFilters } = useGraphData({
 *   rootPath: '/src',
 *   maxDepth: 3
 * });
 *
 * // Apply filters
 * const filteredData = applyFilters({
 *   nodeTypes: ['component', 'hook'],
 *   edgeTypes: ['imports', 'uses']
 * });
 * ```
 */
export function useGraphData(options: UseGraphDataOptions = {}): UseGraphDataReturn {
  const {
    staleTime = STALE_TIME,
    enabled = true,
    rootPath,
    maxDepth,
    nodeTypes,
    edgeTypes,
  } = options;

  const queryClient = useQueryClient();

  const request: GraphDependenciesRequest = {
    rootPath,
    maxDepth,
    includeTypes: nodeTypes,
  };

  const query = useQuery({
    queryKey: graphKeys.dependency(request),
    queryFn: () => fetchGraphDependencies(request),
    staleTime,
    enabled,
    retry: 2,
    select: transformToGraphData,
  });

  const refetch = async () => {
    await query.refetch();
  };

  const invalidate = async () => {
    await queryClient.invalidateQueries({ queryKey: graphKeys.all });
  };

  const applyFilters = (filters: GraphFilters) => {
    return filterGraphData(query.data, filters);
  };

  return {
    graphData: query.data,
    isLoading: query.isLoading,
    isFetching: query.isFetching,
    error: query.error as Error | null,
    refetch,
    invalidate,
    applyFilters,
  };
}

/**
 * Hook for fetching impact analysis for a specific node
 */
export function useGraphImpact(
  nodeId: string | null,
  options: {
    direction?: 'upstream' | 'downstream' | 'both';
    maxDepth?: number;
    enabled?: boolean;
  } = {}
) {
  const { direction = 'both', maxDepth = 3, enabled = true } = options;

  return useQuery({
    queryKey: graphKeys.impact(nodeId || ''),
    queryFn: () => {
      if (!nodeId) throw new Error('Node ID is required');
      return fetchGraphImpact({ nodeId, direction, maxDepth });
    },
    enabled: enabled && !!nodeId,
    staleTime: STALE_TIME,
    retry: 1,
  });
}
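A minimal usage sketch for useGraphImpact, which has no @example of its own (illustrative, not part of the commit; the component, the `@/hooks/...` import path, and the rendering of the response are assumptions):

// Sketch: load impact analysis for the node selected in the graph view.
import { useGraphImpact } from '@/hooks/useGraphData';

export function ImpactSummary({ selectedNodeId }: { selectedNodeId: string | null }) {
  const { data: impact, isLoading } = useGraphImpact(selectedNodeId, {
    direction: 'downstream',
    maxDepth: 2,
  });

  if (!selectedNodeId) return <p>Select a node to see its impact.</p>;
  if (isLoading) return <p>Analyzing impact...</p>;
  // The exact shape of GraphImpactResponse lives in ../lib/api; only its
  // existence is assumed here.
  return <pre>{JSON.stringify(impact, null, 2)}</pre>;
}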
ccw/frontend/src/hooks/useIndex.ts (new file, 143 lines)
@@ -0,0 +1,143 @@
// ========================================
// useIndex Hook
// ========================================
// TanStack Query hooks for index management with real-time updates

import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
import {
  fetchIndexStatus,
  rebuildIndex,
  type IndexStatus,
  type IndexRebuildRequest,
} from '../lib/api';

// ========== Query Keys ==========

export const indexKeys = {
  all: ['index'] as const,
  status: () => [...indexKeys.all, 'status'] as const,
};

// ========== Stale Time ==========

// Default stale time: 30 seconds (index status updates less frequently)
const STALE_TIME = 30 * 1000;

// ========== Query Hook ==========

export interface UseIndexStatusOptions {
  enabled?: boolean;
  staleTime?: number;
  refetchInterval?: number;
}

export interface UseIndexStatusReturn {
  status: IndexStatus | null;
  isLoading: boolean;
  isFetching: boolean;
  error: Error | null;
  refetch: () => Promise<void>;
  invalidate: () => Promise<void>;
}

/**
 * Hook for fetching index status
 *
 * @example
 * ```tsx
 * const { status, isLoading, refetch } = useIndexStatus();
 * ```
 */
export function useIndexStatus(options: UseIndexStatusOptions = {}): UseIndexStatusReturn {
  const { staleTime = STALE_TIME, enabled = true, refetchInterval = 0 } = options;
  const queryClient = useQueryClient();

  const query = useQuery({
    queryKey: indexKeys.status(),
    queryFn: fetchIndexStatus,
    staleTime,
    enabled,
    refetchInterval: refetchInterval > 0 ? refetchInterval : false,
    retry: 2,
  });

  const refetch = async () => {
    await query.refetch();
  };

  const invalidate = async () => {
    await queryClient.invalidateQueries({ queryKey: indexKeys.all });
  };

  return {
    status: query.data ?? null,
    isLoading: query.isLoading,
    isFetching: query.isFetching,
    error: query.error,
    refetch,
    invalidate,
  };
}

// ========== Mutation Hooks ==========

export interface UseRebuildIndexReturn {
  rebuildIndex: (request?: IndexRebuildRequest) => Promise<IndexStatus>;
  isRebuilding: boolean;
  error: Error | null;
}

/**
 * Hook for rebuilding index
 *
 * @example
 * ```tsx
 * const { rebuildIndex, isRebuilding } = useRebuildIndex();
 *
 * const handleRebuild = async () => {
 *   await rebuildIndex({ force: true });
 * };
 * ```
 */
export function useRebuildIndex(): UseRebuildIndexReturn {
  const queryClient = useQueryClient();

  const mutation = useMutation({
    mutationFn: rebuildIndex,
    onSuccess: (updatedStatus) => {
      // Update the status query cache
      queryClient.setQueryData(indexKeys.status(), updatedStatus);
    },
  });

  return {
    rebuildIndex: mutation.mutateAsync,
    isRebuilding: mutation.isPending,
    error: mutation.error,
  };
}

/**
 * Combined hook for all index operations
 *
 * @example
 * ```tsx
 * const {
 *   status,
 *   isLoading,
 *   rebuildIndex,
 *   isRebuilding,
 * } = useIndex();
 * ```
 */
export function useIndex() {
  const status = useIndexStatus();
  const rebuild = useRebuildIndex();

  return {
    ...status,
    rebuildIndex: rebuild.rebuildIndex,
    isRebuilding: rebuild.isRebuilding,
    rebuildError: rebuild.error,
  };
}
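A minimal sketch combining useIndexStatus polling with useRebuildIndex (illustrative, not part of the commit; the component and the `@/hooks/...` import path are assumptions; the `force` flag comes from the @example above):

// Sketch: trigger a rebuild and poll status every 2 seconds while mounted.
import { useIndexStatus, useRebuildIndex } from '@/hooks/useIndex';

export function IndexPanel() {
  const { status, isLoading } = useIndexStatus({ refetchInterval: 2000 });
  const { rebuildIndex, isRebuilding } = useRebuildIndex();

  return (
    <div>
      <button disabled={isRebuilding} onClick={() => rebuildIndex({ force: true })}>
        {isRebuilding ? 'Rebuilding...' : 'Rebuild index'}
      </button>
      {isLoading ? <p>Loading status...</p> : <pre>{JSON.stringify(status, null, 2)}</pre>}
    </div>
  );
}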
@@ -14,6 +14,8 @@ import {
  type Issue,
  type IssuesResponse,
} from '../lib/api';
import { useWorkflowStore, selectProjectPath } from '@/stores/workflowStore';
import { workspaceQueryKeys } from '@/lib/queryKeys';

// Query key factory
export const issuesKeys = {
@@ -63,23 +65,27 @@ export interface UseIssuesReturn {
 * Hook for fetching and filtering issues
 */
export function useIssues(options: UseIssuesOptions = {}): UseIssuesReturn {
  const { filter, projectPath, staleTime = STALE_TIME, enabled = true, refetchInterval = 0 } = options;
  const { filter, staleTime = STALE_TIME, enabled = true, refetchInterval = 0 } = options;
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);

  // Only enable query when projectPath is available
  const queryEnabled = enabled && !!projectPath;

  const issuesQuery = useQuery({
    queryKey: issuesKeys.list(filter),
    queryKey: workspaceQueryKeys.issuesList(projectPath),
    queryFn: () => fetchIssues(projectPath),
    staleTime,
    enabled,
    enabled: queryEnabled,
    refetchInterval: refetchInterval > 0 ? refetchInterval : false,
    retry: 2,
  });

  const historyQuery = useQuery({
    queryKey: issuesKeys.history(),
    queryKey: workspaceQueryKeys.issuesHistory(projectPath),
    queryFn: () => fetchIssueHistory(projectPath),
    staleTime,
    enabled: enabled && (filter?.includeHistory ?? false),
    enabled: queryEnabled && (filter?.includeHistory ?? false),
    retry: 2,
  });

@@ -151,7 +157,9 @@ export function useIssues(options: UseIssuesOptions = {}): UseIssuesReturn {
  };

  const invalidate = async () => {
    await queryClient.invalidateQueries({ queryKey: issuesKeys.all });
    if (projectPath) {
      await queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.issues(projectPath) });
    }
  };

  return {
@@ -173,11 +181,13 @@ export function useIssues(options: UseIssuesOptions = {}): UseIssuesReturn {
/**
 * Hook for fetching issue queue
 */
export function useIssueQueue(projectPath?: string) {
export function useIssueQueue(): ReturnType<typeof useQuery> {
  const projectPath = useWorkflowStore(selectProjectPath);
  return useQuery({
    queryKey: issuesKeys.queue(),
    queryKey: projectPath ? workspaceQueryKeys.issueQueue(projectPath) : ['issueQueue', 'no-project'],
    queryFn: () => fetchIssueQueue(projectPath),
    staleTime: STALE_TIME,
    enabled: !!projectPath,
    retry: 2,
  });
}
@@ -192,16 +202,13 @@ export interface UseCreateIssueReturn {

export function useCreateIssue(): UseCreateIssueReturn {
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);

  const mutation = useMutation({
    mutationFn: createIssue,
    onSuccess: (newIssue) => {
      queryClient.setQueryData<IssuesResponse>(issuesKeys.list(), (old) => {
        if (!old) return { issues: [newIssue] };
        return {
          issues: [newIssue, ...old.issues],
        };
      });
    onSuccess: () => {
      // Invalidate issues cache to trigger refetch
      queryClient.invalidateQueries({ queryKey: projectPath ? workspaceQueryKeys.issues(projectPath) : ['issues'] });
    },
  });

@@ -220,17 +227,14 @@ export interface UseUpdateIssueReturn {

export function useUpdateIssue(): UseUpdateIssueReturn {
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);

  const mutation = useMutation({
    mutationFn: ({ issueId, input }: { issueId: string; input: Partial<Issue> }) =>
      updateIssue(issueId, input),
    onSuccess: (updatedIssue) => {
      queryClient.setQueryData<IssuesResponse>(issuesKeys.list(), (old) => {
        if (!old) return old;
        return {
          issues: old.issues.map((i) => (i.id === updatedIssue.id ? updatedIssue : i)),
        };
      });
    onSuccess: () => {
      // Invalidate issues cache to trigger refetch
      queryClient.invalidateQueries({ queryKey: projectPath ? workspaceQueryKeys.issues(projectPath) : ['issues'] });
    },
  });

@@ -249,29 +253,13 @@ export interface UseDeleteIssueReturn {

export function useDeleteIssue(): UseDeleteIssueReturn {
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);

  const mutation = useMutation({
    mutationFn: deleteIssue,
    onMutate: async (issueId) => {
      await queryClient.cancelQueries({ queryKey: issuesKeys.all });
      const previousIssues = queryClient.getQueryData<IssuesResponse>(issuesKeys.list());

      queryClient.setQueryData<IssuesResponse>(issuesKeys.list(), (old) => {
        if (!old) return old;
        return {
          issues: old.issues.filter((i) => i.id !== issueId),
        };
      });

      return { previousIssues };
    },
    onError: (_error, _issueId, context) => {
      if (context?.previousIssues) {
        queryClient.setQueryData(issuesKeys.list(), context.previousIssues);
      }
    },
    onSettled: () => {
      queryClient.invalidateQueries({ queryKey: issuesKeys.all });
    onSuccess: () => {
      // Invalidate to ensure sync with server
      queryClient.invalidateQueries({ queryKey: projectPath ? workspaceQueryKeys.issues(projectPath) : ['issues'] });
    },
  });
@@ -12,6 +12,8 @@ import {
  type CoreMemory,
  type MemoryResponse,
} from '../lib/api';
import { useWorkflowStore, selectProjectPath } from '@/stores/workflowStore';
import { workspaceQueryKeys } from '@/lib/queryKeys';

// Query key factory
export const memoryKeys = {
@@ -54,12 +56,16 @@ export interface UseMemoryReturn {
export function useMemory(options: UseMemoryOptions = {}): UseMemoryReturn {
  const { filter, staleTime = STALE_TIME, enabled = true } = options;
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);

  // Only enable query when projectPath is available
  const queryEnabled = enabled && !!projectPath;

  const query = useQuery({
    queryKey: memoryKeys.list(filter),
    queryKey: workspaceQueryKeys.memoryList(projectPath),
    queryFn: fetchMemories,
    staleTime,
    enabled,
    enabled: queryEnabled,
    retry: 2,
  });

@@ -100,7 +106,9 @@ export function useMemory(options: UseMemoryOptions = {}): UseMemoryReturn {
  };

  const invalidate = async () => {
    await queryClient.invalidateQueries({ queryKey: memoryKeys.all });
    if (projectPath) {
      await queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.memory(projectPath) });
    }
  };

  return {
@@ -126,18 +134,13 @@ export interface UseCreateMemoryReturn {

export function useCreateMemory(): UseCreateMemoryReturn {
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);

  const mutation = useMutation({
    mutationFn: createMemory,
    onSuccess: (newMemory) => {
      queryClient.setQueryData<MemoryResponse>(memoryKeys.list(), (old) => {
        if (!old) return { memories: [newMemory], totalSize: 0, claudeMdCount: 0 };
        return {
          ...old,
          memories: [newMemory, ...old.memories],
          totalSize: old.totalSize + (newMemory.size ?? 0),
        };
      });
    onSuccess: () => {
      // Invalidate memory cache to trigger refetch
      queryClient.invalidateQueries({ queryKey: projectPath ? workspaceQueryKeys.memory(projectPath) : ['memory'] });
    },
  });

@@ -156,20 +159,14 @@ export interface UseUpdateMemoryReturn {

export function useUpdateMemory(): UseUpdateMemoryReturn {
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);

  const mutation = useMutation({
    mutationFn: ({ memoryId, input }: { memoryId: string; input: Partial<CoreMemory> }) =>
      updateMemory(memoryId, input),
    onSuccess: (updatedMemory) => {
      queryClient.setQueryData<MemoryResponse>(memoryKeys.list(), (old) => {
        if (!old) return old;
        return {
          ...old,
          memories: old.memories.map((m) =>
            m.id === updatedMemory.id ? updatedMemory : m
          ),
        };
      });
    onSuccess: () => {
      // Invalidate memory cache to trigger refetch
      queryClient.invalidateQueries({ queryKey: projectPath ? workspaceQueryKeys.memory(projectPath) : ['memory'] });
    },
  });

@@ -188,32 +185,13 @@ export interface UseDeleteMemoryReturn {

export function useDeleteMemory(): UseDeleteMemoryReturn {
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);

  const mutation = useMutation({
    mutationFn: deleteMemory,
    onMutate: async (memoryId) => {
      await queryClient.cancelQueries({ queryKey: memoryKeys.all });
      const previousMemories = queryClient.getQueryData<MemoryResponse>(memoryKeys.list());

      queryClient.setQueryData<MemoryResponse>(memoryKeys.list(), (old) => {
        if (!old) return old;
        const removedMemory = old.memories.find((m) => m.id === memoryId);
        return {
          ...old,
          memories: old.memories.filter((m) => m.id !== memoryId),
          totalSize: old.totalSize - (removedMemory?.size ?? 0),
        };
      });

      return { previousMemories };
    },
    onError: (_error, _memoryId, context) => {
      if (context?.previousMemories) {
        queryClient.setQueryData(memoryKeys.list(), context.previousMemories);
      }
    },
    onSettled: () => {
      queryClient.invalidateQueries({ queryKey: memoryKeys.all });
    onSuccess: () => {
      // Invalidate to ensure sync with server
      queryClient.invalidateQueries({ queryKey: projectPath ? workspaceQueryKeys.memory(projectPath) : ['memory'] });
    },
  });
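Several hooks in this commit key their caches off workspaceQueryKeys from '@/lib/queryKeys', which is not shown in this diff. The sketch below only illustrates the shape implied by the calls above; the member names come from usage in this commit, while the key contents and signatures are assumptions:

// Sketch of a project-scoped query key factory matching how it is called above.
export const workspaceQueryKeys = {
  all: (projectPath: string) => ['workspace', projectPath] as const,
  projectOverview: (projectPath: string) => ['workspace', projectPath, 'projectOverview'] as const,
  issues: (projectPath: string) => ['workspace', projectPath, 'issues'] as const,
  issuesList: (projectPath: string) => ['workspace', projectPath, 'issues', 'list'] as const,
  issuesHistory: (projectPath: string) => ['workspace', projectPath, 'issues', 'history'] as const,
  issueQueue: (projectPath: string) => ['workspace', projectPath, 'issues', 'queue'] as const,
  memory: (projectPath: string) => ['workspace', projectPath, 'memory'] as const,
  memoryList: (projectPath: string) => ['workspace', projectPath, 'memory', 'list'] as const,
};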
ccw/frontend/src/hooks/usePromptHistory.ts (new file, 254 lines)
@@ -0,0 +1,254 @@
|
||||
// ========================================
|
||||
// usePromptHistory Hook
|
||||
// ========================================
// TanStack Query hooks for prompt history with real-time updates

import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
import {
  fetchPrompts,
  fetchPromptInsights,
  analyzePrompts,
  deletePrompt,
  type Prompt,
  type PromptInsight,
  type Pattern,
  type Suggestion,
  type PromptsResponse,
  type PromptInsightsResponse,
} from '../lib/api';

// Query key factory
export const promptHistoryKeys = {
  all: ['promptHistory'] as const,
  lists: () => [...promptHistoryKeys.all, 'list'] as const,
  list: (filters?: PromptHistoryFilter) => [...promptHistoryKeys.lists(), filters] as const,
  insights: () => [...promptHistoryKeys.all, 'insights'] as const,
};

// Default stale time: 30 seconds (prompts update less frequently)
const STALE_TIME = 30 * 1000;

export interface PromptHistoryFilter {
  search?: string;
  intent?: string;
  dateRange?: { start: Date | null; end: Date | null };
}

export interface UsePromptHistoryOptions {
  filter?: PromptHistoryFilter;
  staleTime?: number;
  enabled?: boolean;
}

export interface UsePromptHistoryReturn {
  prompts: Prompt[];
  totalPrompts: number;
  promptsBySession: Record<string, Prompt[]>;
  stats: {
    totalCount: number;
    avgLength: number;
    topIntent: string | null;
  };
  isLoading: boolean;
  isFetching: boolean;
  error: Error | null;
  refetch: () => Promise<void>;
  invalidate: () => Promise<void>;
}

/**
 * Hook for fetching and filtering prompt history
 */
export function usePromptHistory(options: UsePromptHistoryOptions = {}): UsePromptHistoryReturn {
  const { filter, staleTime = STALE_TIME, enabled = true } = options;
  const queryClient = useQueryClient();

  const query = useQuery({
    queryKey: promptHistoryKeys.list(filter),
    queryFn: fetchPrompts,
    staleTime,
    enabled,
    retry: 2,
  });

  const allPrompts = query.data?.prompts ?? [];
  const totalCount = query.data?.total ?? 0;

  // Apply filters
  const filteredPrompts = (() => {
    let prompts = allPrompts;

    if (filter?.search) {
      const searchLower = filter.search.toLowerCase();
      prompts = prompts.filter(
        (p) =>
          p.title?.toLowerCase().includes(searchLower) ||
          p.content.toLowerCase().includes(searchLower) ||
          p.tags?.some((t) => t.toLowerCase().includes(searchLower))
      );
    }

    if (filter?.intent) {
      prompts = prompts.filter((p) => p.category === filter.intent);
    }

    if (filter?.dateRange?.start || filter?.dateRange?.end) {
      prompts = prompts.filter((p) => {
        const date = new Date(p.createdAt);
        const start = filter.dateRange?.start;
        const end = filter.dateRange?.end;
        if (start && date < start) return false;
        if (end && date > end) return false;
        return true;
      });
    }

    return prompts;
  })();

  // Group by session for timeline view
  const promptsBySession: Record<string, Prompt[]> = {};
  for (const prompt of allPrompts) {
    const sessionKey = prompt.tags?.find((t) => t.startsWith('session:'))?.replace('session:', '') || 'ungrouped';
    if (!promptsBySession[sessionKey]) {
      promptsBySession[sessionKey] = [];
    }
    promptsBySession[sessionKey].push(prompt);
  }

  // Calculate stats
  const avgLength = allPrompts.length > 0
    ? Math.round(allPrompts.reduce((sum, p) => sum + p.content.length, 0) / allPrompts.length)
    : 0;

  const intentCounts: Record<string, number> = {};
  for (const prompt of allPrompts) {
    const category = prompt.category || 'uncategorized';
    intentCounts[category] = (intentCounts[category] || 0) + 1;
  }
  const topIntent = Object.entries(intentCounts).sort((a, b) => b[1] - a[1])[0]?.[0] || null;

  const refetch = async () => {
    await query.refetch();
  };

  const invalidate = async () => {
    await queryClient.invalidateQueries({ queryKey: promptHistoryKeys.all });
  };

  return {
    prompts: filteredPrompts,
    totalPrompts: totalCount,
    promptsBySession,
    stats: {
      totalCount: allPrompts.length,
      avgLength,
      topIntent,
    },
    isLoading: query.isLoading,
    isFetching: query.isFetching,
    error: query.error,
    refetch,
    invalidate,
  };
}
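
A minimal usage sketch (not part of this commit) of how a component might consume usePromptHistory; the PromptHistoryPanel component, the '@/hooks/usePromptHistory' import alias, and the 'debug' intent value are illustrative assumptions, while the returned fields match UsePromptHistoryReturn above:

// Hypothetical consumer of usePromptHistory - illustration only.
import { useState } from 'react';
import { usePromptHistory } from '@/hooks/usePromptHistory';

export function PromptHistoryPanel() {
  const [search, setSearch] = useState('');
  // Search and intent are applied client-side by the hook (see filteredPrompts above).
  const { prompts, stats, isLoading, error, refetch } = usePromptHistory({
    filter: { search, intent: 'debug' },
  });

  if (isLoading) return <p>Loading prompts...</p>;
  if (error) return <p>Failed to load prompts: {error.message}</p>;

  return (
    <div>
      <input value={search} onChange={(e) => setSearch(e.target.value)} placeholder="Search prompts" />
      <p>
        Showing {prompts.length} of {stats.totalCount} prompts (avg {stats.avgLength} chars, top intent: {stats.topIntent ?? 'n/a'})
      </p>
      <button onClick={() => refetch()}>Refresh</button>
    </div>
  );
}
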
/**
 * Hook for fetching prompt insights
 */
export function usePromptInsights(options: { enabled?: boolean; staleTime?: number } = {}) {
  const { enabled = true, staleTime = STALE_TIME } = options;

  return useQuery({
    queryKey: promptHistoryKeys.insights(),
    queryFn: fetchPromptInsights,
    staleTime,
    enabled,
    retry: 2,
  });
}

// ========== Mutations ==========

export interface UseAnalyzePromptsReturn {
  analyzePrompts: (request?: { tool?: 'gemini' | 'qwen' | 'codex'; limit?: number }) => Promise<PromptInsightsResponse>;
  isAnalyzing: boolean;
  error: Error | null;
}

export function useAnalyzePrompts(): UseAnalyzePromptsReturn {
  const queryClient = useQueryClient();

  const mutation = useMutation({
    mutationFn: analyzePrompts,
    onSuccess: () => {
      // Invalidate insights query after analysis
      queryClient.invalidateQueries({ queryKey: promptHistoryKeys.insights() });
    },
  });

  return {
    analyzePrompts: mutation.mutateAsync,
    isAnalyzing: mutation.isPending,
    error: mutation.error,
  };
}

export interface UseDeletePromptReturn {
  deletePrompt: (promptId: string) => Promise<void>;
  isDeleting: boolean;
  error: Error | null;
}

export function useDeletePrompt(): UseDeletePromptReturn {
  const queryClient = useQueryClient();

  const mutation = useMutation({
    mutationFn: deletePrompt,
    onMutate: async (promptId) => {
      await queryClient.cancelQueries({ queryKey: promptHistoryKeys.all });
      const previousPrompts = queryClient.getQueryData<PromptsResponse>(promptHistoryKeys.list());

      queryClient.setQueryData<PromptsResponse>(promptHistoryKeys.list(), (old) => {
        if (!old) return old;
        return {
          ...old,
          prompts: old.prompts.filter((p) => p.id !== promptId),
          total: old.total - 1,
        };
      });

      return { previousPrompts };
    },
    onError: (_error, _promptId, context) => {
      if (context?.previousPrompts) {
        queryClient.setQueryData(promptHistoryKeys.list(), context.previousPrompts);
      }
    },
    onSettled: () => {
      queryClient.invalidateQueries({ queryKey: promptHistoryKeys.all });
    },
  });

  return {
    deletePrompt: mutation.mutateAsync,
    isDeleting: mutation.isPending,
    error: mutation.error,
  };
}

/**
 * Combined hook for all prompt history mutations
 */
export function usePromptHistoryMutations() {
  const analyze = useAnalyzePrompts();
  const remove = useDeletePrompt();

  return {
    analyzePrompts: analyze.analyzePrompts,
    deletePrompt: remove.deletePrompt,
    isAnalyzing: analyze.isAnalyzing,
    isDeleting: remove.isDeleting,
    isMutating: analyze.isAnalyzing || remove.isDeleting,
  };
}
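
A companion sketch (again not part of the commit) showing usePromptHistoryMutations from a component; deletePrompt uses the optimistic cache update and rollback defined in useDeletePrompt above, and the 'gemini' tool id and limit are just example arguments allowed by UseAnalyzePromptsReturn:

// Hypothetical consumer of usePromptHistoryMutations - illustration only.
import { usePromptHistoryMutations } from '@/hooks/usePromptHistory';

export function PromptActions({ promptId }: { promptId: string }) {
  const { analyzePrompts, deletePrompt, isMutating } = usePromptHistoryMutations();

  return (
    <div>
      {/* Triggers analysis, then the insights query is invalidated by useAnalyzePrompts */}
      <button disabled={isMutating} onClick={() => analyzePrompts({ tool: 'gemini', limit: 50 })}>
        Analyze recent prompts
      </button>
      {/* Removes the prompt optimistically; rolled back if the request fails */}
      <button disabled={isMutating} onClick={() => deletePrompt(promptId)}>
        Delete
      </button>
    </div>
  );
}
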
@@ -16,6 +16,8 @@ import {
} from '../lib/api';
import type { SessionMetadata } from '../types/store';
import { dashboardStatsKeys } from './useDashboardStats';
import { useWorkflowStore, selectProjectPath } from '@/stores/workflowStore';
import { workspaceQueryKeys } from '@/lib/queryKeys';

// Query key factory
export const sessionsKeys = {
@@ -80,12 +82,16 @@ export interface UseSessionsReturn {
export function useSessions(options: UseSessionsOptions = {}): UseSessionsReturn {
  const { filter, staleTime = STALE_TIME, enabled = true, refetchInterval = 0 } = options;
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);

  // Only enable query when projectPath is available
  const queryEnabled = enabled && !!projectPath;

  const query = useQuery({
    queryKey: sessionsKeys.list(filter),
    queryKey: workspaceQueryKeys.sessionsList(projectPath),
    queryFn: fetchSessions,
    staleTime,
    enabled,
    enabled: queryEnabled,
    refetchInterval: refetchInterval > 0 ? refetchInterval : false,
    retry: 2,
    retryDelay: (attemptIndex) => Math.min(1000 * 2 ** attemptIndex, 10000),
@@ -130,7 +136,7 @@ export function useSessions(options: UseSessionsOptions = {}): UseSessionsReturn
  };

  const invalidate = async () => {
    await queryClient.invalidateQueries({ queryKey: sessionsKeys.all });
    await queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.sessions(projectPath) });
  };

  return {
@@ -163,14 +169,8 @@ export function useCreateSession(): UseCreateSessionReturn {
  const mutation = useMutation({
    mutationFn: createSession,
    onSuccess: (newSession) => {
      // Update sessions cache
      queryClient.setQueryData<SessionsResponse>(sessionsKeys.list(), (old) => {
        if (!old) return { activeSessions: [newSession], archivedSessions: [] };
        return {
          ...old,
          activeSessions: [newSession, ...old.activeSessions],
        };
      });
      // Invalidate sessions cache to trigger refetch
      queryClient.invalidateQueries({ queryKey: ['workspace'] });
      // Invalidate dashboard stats
      queryClient.invalidateQueries({ queryKey: dashboardStatsKeys.all });
    },
@@ -198,19 +198,9 @@ export function useUpdateSession(): UseUpdateSessionReturn {
  const mutation = useMutation({
    mutationFn: ({ sessionId, input }: { sessionId: string; input: UpdateSessionInput }) =>
      updateSession(sessionId, input),
    onSuccess: (updatedSession) => {
      // Update sessions cache
      queryClient.setQueryData<SessionsResponse>(sessionsKeys.list(), (old) => {
        if (!old) return old;
        return {
          activeSessions: old.activeSessions.map((s) =>
            s.session_id === updatedSession.session_id ? updatedSession : s
          ),
          archivedSessions: old.archivedSessions.map((s) =>
            s.session_id === updatedSession.session_id ? updatedSession : s
          ),
        };
      });
    onSuccess: () => {
      // Invalidate sessions cache to trigger refetch
      queryClient.invalidateQueries({ queryKey: ['workspace'] });
    },
  });

@@ -235,43 +225,9 @@ export function useArchiveSession(): UseArchiveSessionReturn {

  const mutation = useMutation({
    mutationFn: archiveSession,
    onMutate: async (sessionId) => {
      // Cancel outgoing refetches
      await queryClient.cancelQueries({ queryKey: sessionsKeys.all });

      // Snapshot previous value
      const previousSessions = queryClient.getQueryData<SessionsResponse>(sessionsKeys.list());

      // Optimistically update
      queryClient.setQueryData<SessionsResponse>(sessionsKeys.list(), (old) => {
        if (!old) return old;
        const session = old.activeSessions.find((s) => s.session_id === sessionId);
        if (!session) return old;

        const archivedSession: SessionMetadata = {
          ...session,
          status: 'archived',
          location: 'archived',
          updated_at: new Date().toISOString(),
        };

        return {
          activeSessions: old.activeSessions.filter((s) => s.session_id !== sessionId),
          archivedSessions: [archivedSession, ...old.archivedSessions],
        };
      });

      return { previousSessions };
    },
    onError: (_error, _sessionId, context) => {
      // Rollback on error
      if (context?.previousSessions) {
        queryClient.setQueryData(sessionsKeys.list(), context.previousSessions);
      }
    },
    onSettled: () => {
    onSuccess: () => {
      // Invalidate to ensure sync with server
      queryClient.invalidateQueries({ queryKey: sessionsKeys.all });
      queryClient.invalidateQueries({ queryKey: ['workspace'] });
      queryClient.invalidateQueries({ queryKey: dashboardStatsKeys.all });
    },
  });
@@ -297,33 +253,9 @@ export function useDeleteSession(): UseDeleteSessionReturn {

  const mutation = useMutation({
    mutationFn: deleteSession,
    onMutate: async (sessionId) => {
      // Cancel outgoing refetches
      await queryClient.cancelQueries({ queryKey: sessionsKeys.all });

      // Snapshot previous value
      const previousSessions = queryClient.getQueryData<SessionsResponse>(sessionsKeys.list());

      // Optimistically remove
      queryClient.setQueryData<SessionsResponse>(sessionsKeys.list(), (old) => {
        if (!old) return old;
        return {
          activeSessions: old.activeSessions.filter((s) => s.session_id !== sessionId),
          archivedSessions: old.archivedSessions.filter((s) => s.session_id !== sessionId),
        };
      });

      return { previousSessions };
    },
    onError: (_error, _sessionId, context) => {
      // Rollback on error
      if (context?.previousSessions) {
        queryClient.setQueryData(sessionsKeys.list(), context.previousSessions);
      }
    },
    onSettled: () => {
    onSuccess: () => {
      // Invalidate to ensure sync with server
      queryClient.invalidateQueries({ queryKey: sessionsKeys.all });
      queryClient.invalidateQueries({ queryKey: ['workspace'] });
      queryClient.invalidateQueries({ queryKey: dashboardStatsKeys.all });
    },
  });
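
The hunks above drop the hand-written optimistic cache edits in favour of invalidation: every workspace-scoped query key is expected to start with the 'workspace' prefix, so a single invalidateQueries call marks sessions, tasks, and the rest stale. A minimal sketch of that prefix matching, with an assumed key layout of ['workspace', projectPath, domain, ...] (the real factory lives in '@/lib/queryKeys', outside this excerpt):

// Sketch only - assumed key layout, not the actual '@/lib/queryKeys' implementation.
import { QueryClient } from '@tanstack/react-query';

const queryClient = new QueryClient();
const projectPath = '/repo/my-project'; // hypothetical workspace path

// Two hypothetical workspace-scoped caches:
queryClient.setQueryData(['workspace', projectPath, 'sessions', 'list'], { activeSessions: [], archivedSessions: [] });
queryClient.setQueryData(['workspace', projectPath, 'tasks', 'list'], []);

// TanStack Query matches keys by prefix, so one call marks every workspace query stale:
queryClient.invalidateQueries({ queryKey: ['workspace'] });

// A longer prefix narrows the scope to a single domain:
queryClient.invalidateQueries({ queryKey: ['workspace', projectPath, 'sessions'] });
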
@@ -10,6 +10,8 @@ import {
  type Skill,
  type SkillsResponse,
} from '../lib/api';
import { useWorkflowStore, selectProjectPath } from '@/stores/workflowStore';
import { workspaceQueryKeys } from '@/lib/queryKeys';

// Query key factory
export const skillsKeys = {
@@ -54,12 +56,16 @@ export interface UseSkillsReturn {
export function useSkills(options: UseSkillsOptions = {}): UseSkillsReturn {
  const { filter, staleTime = STALE_TIME, enabled = true } = options;
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);

  // Only enable query when projectPath is available
  const queryEnabled = enabled && !!projectPath;

  const query = useQuery({
    queryKey: skillsKeys.list(filter),
    queryKey: workspaceQueryKeys.skillsList(projectPath),
    queryFn: fetchSkills,
    staleTime,
    enabled,
    enabled: queryEnabled,
    retry: 2,
  });

@@ -114,7 +120,9 @@ export function useSkills(options: UseSkillsOptions = {}): UseSkillsReturn {
  };

  const invalidate = async () => {
    await queryClient.invalidateQueries({ queryKey: skillsKeys.all });
    if (projectPath) {
      await queryClient.invalidateQueries({ queryKey: workspaceQueryKeys.skills(projectPath) });
    }
  };

  return {
@@ -142,33 +150,14 @@ export interface UseToggleSkillReturn {

export function useToggleSkill(): UseToggleSkillReturn {
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);

  const mutation = useMutation({
    mutationFn: ({ skillName, enabled }: { skillName: string; enabled: boolean }) =>
      toggleSkill(skillName, enabled),
    onMutate: async ({ skillName, enabled }) => {
      await queryClient.cancelQueries({ queryKey: skillsKeys.all });
      const previousSkills = queryClient.getQueryData<SkillsResponse>(skillsKeys.list());

      // Optimistic update
      queryClient.setQueryData<SkillsResponse>(skillsKeys.list(), (old) => {
        if (!old) return old;
        return {
          skills: old.skills.map((s) =>
            s.name === skillName ? { ...s, enabled } : s
          ),
        };
      });

      return { previousSkills };
    },
    onError: (_error, _vars, context) => {
      if (context?.previousSkills) {
        queryClient.setQueryData(skillsKeys.list(), context.previousSkills);
      }
    },
    onSettled: () => {
      queryClient.invalidateQueries({ queryKey: skillsKeys.all });
    onSuccess: () => {
      // Invalidate to ensure sync with server
      queryClient.invalidateQueries({ queryKey: projectPath ? workspaceQueryKeys.skills(projectPath) : ['skills'] });
    },
  });

206
ccw/frontend/src/hooks/useSystemNotifications.ts
Normal file
@@ -0,0 +1,206 @@
// ========================================
// useSystemNotifications Hook
// ========================================
// Browser native notification support with permission handling,
// localStorage preference persistence, icon/badge display,
// click-to-focus behavior, and 5-second auto-close

import { useState, useEffect, useCallback } from 'react';

// Local storage key for system notifications preference
const SYSTEM_NOTIFICATIONS_ENABLED_KEY = 'ccw_system_notifications_enabled';

// Auto-close timeout for native notifications (ms)
const NOTIFICATION_AUTO_CLOSE_MS = 5000;

/**
 * System notification options
 */
export interface SystemNotificationOptions {
  title: string;
  body?: string;
  icon?: string;
  badge?: string;
  tag?: string;
  requireInteraction?: boolean;
}

/**
 * Return type for useSystemNotifications hook
 */
export interface UseSystemNotificationsReturn {
  enabled: boolean;
  permission: NotificationPermission;
  toggleEnabled: () => Promise<void>;
  requestPermission: () => Promise<boolean>;
  showNotification: (options: SystemNotificationOptions) => void;
}

/**
 * Check if Notification API is supported
 */
function isNotificationSupported(): boolean {
  return typeof window !== 'undefined' && 'Notification' in window;
}

/**
 * Load system notifications enabled preference from localStorage
 */
function loadEnabledPreference(): boolean {
  if (typeof window === 'undefined') return false;

  try {
    const saved = localStorage.getItem(SYSTEM_NOTIFICATIONS_ENABLED_KEY);
    return saved === 'true';
  } catch {
    console.warn('[useSystemNotifications] Failed to load preference from localStorage');
    return false;
  }
}

/**
 * Save system notifications enabled preference to localStorage
 */
function saveEnabledPreference(enabled: boolean): void {
  if (typeof window === 'undefined') return;

  try {
    localStorage.setItem(SYSTEM_NOTIFICATIONS_ENABLED_KEY, String(enabled));
  } catch {
    console.warn('[useSystemNotifications] Failed to save preference to localStorage');
  }
}

/**
 * Hook for browser native notification support
 *
 * Features:
 * - Permission handling with browser dialog
 * - localStorage preference persistence
 * - Icon/badge display
 * - Click-to-focus window behavior
 * - 5-second auto-close
 * - Graceful handling when API unavailable
 *
 * @returns Object with enabled state, permission status, and control functions
 */
export function useSystemNotifications(): UseSystemNotificationsReturn {
  const [enabled, setEnabled] = useState<boolean>(() => loadEnabledPreference());
  const [permission, setPermission] = useState<NotificationPermission>(() => {
    if (!isNotificationSupported()) return 'denied';
    return Notification.permission;
  });

  // Sync permission state with window.Notification
  useEffect(() => {
    if (!isNotificationSupported()) return;

    const checkPermission = () => {
      setPermission(Notification.permission);
    };

    checkPermission();

    // Listen for permission changes (some browsers support this)
    window.addEventListener('notificationpermissionchange', checkPermission);
    return () => {
      window.removeEventListener('notificationpermissionchange', checkPermission);
    };
  }, []);

  /**
   * Request browser notification permission
   * Prompts user with browser permission dialog
   */
  const requestPermission = useCallback(async (): Promise<boolean> => {
    if (!isNotificationSupported()) {
      console.warn('[useSystemNotifications] Notification API not supported');
      return false;
    }

    if (Notification.permission === 'granted') {
      setPermission('granted');
      return true;
    }

    if (Notification.permission === 'denied') {
      setPermission('denied');
      return false;
    }

    try {
      const result = await Notification.requestPermission();
      setPermission(result);
      return result === 'granted';
    } catch (error) {
      console.warn('[useSystemNotifications] Failed to request permission:', error);
      return false;
    }
  }, []);

  /**
   * Toggle system notifications enabled state
   * Requests permission if not granted when enabling
   */
  const toggleEnabled = useCallback(async (): Promise<void> => {
    if (enabled) {
      // Disabling - just update preference
      const newState = false;
      setEnabled(newState);
      saveEnabledPreference(newState);
    } else {
      // Enabling - request permission first
      const granted = await requestPermission();
      if (granted) {
        const newState = true;
        setEnabled(newState);
        saveEnabledPreference(newState);
      }
    }
  }, [enabled, requestPermission]);

  /**
   * Show a native browser notification
   * Only shows if enabled and permission granted
   */
  const showNotification = useCallback((options: SystemNotificationOptions) => {
    if (!enabled) return;
    if (!isNotificationSupported()) return;
    if (permission !== 'granted') return;

    try {
      const notification = new Notification(options.title, {
        body: options.body,
        icon: options.icon || '/favicon.ico',
        badge: options.badge || '/favicon.ico',
        tag: options.tag || `ccw-notif-${Date.now()}`,
        requireInteraction: options.requireInteraction || false,
      });

      // Click handler: focus window and close notification
      notification.onclick = () => {
        window.focus();
        notification.close();
      };

      // Auto-close after 5 seconds (unless requireInteraction is true)
      if (!options.requireInteraction) {
        setTimeout(() => {
          notification.close();
        }, NOTIFICATION_AUTO_CLOSE_MS);
      }
    } catch (error) {
      console.warn('[useSystemNotifications] Failed to show notification:', error);
    }
  }, [enabled, permission]);

  return {
    enabled,
    permission,
    toggleEnabled,
    requestPermission,
    showNotification,
  };
}

export default useSystemNotifications;
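
A usage sketch (not part of this commit) for useSystemNotifications; the NotificationSettings component is hypothetical, but the fields and calls match UseSystemNotificationsReturn above. Enabling via toggleEnabled triggers the browser permission prompt when permission is still 'default':

// Hypothetical settings panel - illustration only.
import { useSystemNotifications } from '@/hooks/useSystemNotifications';

export function NotificationSettings() {
  const { enabled, permission, toggleEnabled, showNotification } = useSystemNotifications();

  return (
    <div>
      <button onClick={() => toggleEnabled()}>
        {enabled ? 'Disable' : 'Enable'} system notifications
      </button>
      <span>Browser permission: {permission}</span>
      <button
        disabled={!enabled || permission !== 'granted'}
        onClick={() => showNotification({ title: 'CCW', body: 'Test notification', tag: 'ccw-test' })}
      >
        Send test notification
      </button>
    </div>
  );
}
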
@@ -13,6 +13,7 @@ import {
  type OrchestratorWebSocketMessage,
  type ExecutionLog,
} from '../types/execution';
import { SurfaceUpdateSchema } from '../packages/a2ui-runtime/core/A2UITypes';

// Constants
const RECONNECT_DELAY_BASE = 1000; // 1 second
@@ -42,6 +43,7 @@ export function useWebSocket(options: UseWebSocketOptions = {}): UseWebSocketRet
  const setWsLastMessage = useNotificationStore((state) => state.setWsLastMessage);
  const incrementReconnectAttempts = useNotificationStore((state) => state.incrementReconnectAttempts);
  const resetReconnectAttempts = useNotificationStore((state) => state.resetReconnectAttempts);
  const addA2UINotification = useNotificationStore((state) => state.addA2UINotification);

  // Execution store for state updates
  const setExecutionStatus = useExecutionStore((state) => state.setExecutionStatus);
@@ -130,6 +132,17 @@ export function useWebSocket(options: UseWebSocketOptions = {}): UseWebSocketRet
        return;
      }

      // Handle A2UI surface messages
      if (data.type === 'a2ui-surface') {
        const parsed = SurfaceUpdateSchema.safeParse(data.payload);
        if (parsed.success) {
          addA2UINotification(parsed.data, 'Interactive UI');
        } else {
          console.warn('[WebSocket] Invalid A2UI surface:', parsed.error.issues);
        }
        return;
      }

      // Check if this is an orchestrator message
      if (!data.type?.startsWith('ORCHESTRATOR_')) {
        return;

156
ccw/frontend/src/hooks/useWebSocketNotifications.ts
Normal file
@@ -0,0 +1,156 @@
// ========================================
// useWebSocketNotifications Hook
// ========================================
// Watches wsLastMessage from notificationStore and maps WebSocket events
// to persistent notifications for the notification panel

import { useEffect } from 'react';
import { useNotificationStore } from '@/stores';
import type { WebSocketMessage } from '@/types/store';

// WebSocket message types that should create persistent notifications
type NotificationEventType =
  | 'SESSION_CREATED'
  | 'TASK_COMPLETED'
  | 'TASK_FAILED'
  | 'CLI_EXECUTION_STARTED'
  | 'CLI_EXECUTION_COMPLETED'
  | 'MEMORY_UPDATED';

interface SessionCreatedPayload {
  sessionId: string;
  title?: string;
}

interface TaskEventPayload {
  sessionId?: string;
  taskId?: string;
  summary?: string;
  error?: string;
}

interface CliExecutionPayload {
  executionId: string;
  tool: string;
  duration?: number;
}

interface MemoryUpdatedPayload {
  memoryId?: string;
  operation?: string;
}

export function useWebSocketNotifications(): void {
  const wsLastMessage = useNotificationStore((state) => state.wsLastMessage);
  const setWsLastMessage = useNotificationStore((state) => state.setWsLastMessage);
  const addPersistentNotification = useNotificationStore(
    (state) => state.addPersistentNotification
  );

  useEffect(() => {
    // Only process when we have a message
    if (!wsLastMessage) {
      return;
    }

    const { type, payload } = wsLastMessage as WebSocketMessage & {
      payload?: unknown;
    };

    // Route message type to appropriate notification
    switch (type as NotificationEventType) {
      case 'SESSION_CREATED': {
        const data = payload as SessionCreatedPayload | undefined;
        const sessionId = data?.sessionId || 'unknown';
        const title = data?.title ? `"${data.title}"` : '';

        addPersistentNotification({
          type: 'info',
          title: 'Session Created',
          message: `New session ${title}created (${sessionId})`,
          read: false,
        });
        break;
      }

      case 'TASK_COMPLETED': {
        const data = payload as TaskEventPayload | undefined;
        const summary = data?.summary || 'Task completed successfully';
        const taskId = data?.taskId;

        addPersistentNotification({
          type: 'success',
          title: 'Task Completed',
          message: taskId ? `${summary} (${taskId})` : summary,
          read: false,
        });
        break;
      }

      case 'TASK_FAILED': {
        const data = payload as TaskEventPayload | undefined;
        const error = data?.error || 'Task execution failed';
        const taskId = data?.taskId;

        addPersistentNotification({
          type: 'error',
          title: 'Task Failed',
          message: taskId ? `${error} (${taskId})` : error,
          duration: 0, // Errors don't auto-dismiss
          read: false,
        });
        break;
      }

      case 'CLI_EXECUTION_STARTED': {
        const data = payload as CliExecutionPayload | undefined;
        const tool = data?.tool || 'CLI';

        addPersistentNotification({
          type: 'info',
          title: 'CLI Execution Started',
          message: `${tool} execution started`,
          read: false,
        });
        break;
      }

      case 'CLI_EXECUTION_COMPLETED': {
        const data = payload as CliExecutionPayload | undefined;
        const tool = data?.tool || 'CLI';
        const duration = data?.duration;
        const durationText = duration ? ` (${duration}ms)` : '';

        addPersistentNotification({
          type: 'success',
          title: 'CLI Execution Completed',
          message: `${tool} execution completed${durationText}`,
          read: false,
        });
        break;
      }

      case 'MEMORY_UPDATED': {
        const data = payload as MemoryUpdatedPayload | undefined;
        const operation = data?.operation || 'update';

        addPersistentNotification({
          type: 'info',
          title: 'Memory Updated',
          message: `Memory ${operation} completed`,
          read: false,
        });
        break;
      }

      default:
        // Unknown message type - ignore
        break;
    }

    // Clear the message after processing to prevent duplicate handling
    setWsLastMessage(null);
  }, [wsLastMessage, addPersistentNotification, setWsLastMessage]);
}

export default useWebSocketNotifications;
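
A usage sketch (not part of this commit): useWebSocketNotifications returns void, so mounting it once near the app root is enough; it reacts to wsLastMessage updates written by useWebSocket and clears them after mapping them to persistent notifications. The AppShell component below is illustrative:

// Hypothetical app shell - illustration only.
import type { ReactNode } from 'react';
import { useWebSocketNotifications } from '@/hooks/useWebSocketNotifications';

export function AppShell({ children }: { children: ReactNode }) {
  // Subscribes to the notification store and converts WebSocket events
  // (SESSION_CREATED, TASK_COMPLETED, CLI_EXECUTION_*, MEMORY_UPDATED, ...)
  // into entries for the notification panel.
  useWebSocketNotifications();

  return <>{children}</>;
}
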
100
ccw/frontend/src/hooks/useWorkspaceQueryKeys.ts
Normal file
@@ -0,0 +1,100 @@
// ========================================
// useWorkspaceQueryKeys Hook
// ========================================
// Returns workspace-aware query keys factory with current projectPath

import { useMemo } from 'react';
import { useWorkflowStore } from '../stores/workflowStore';
import { selectProjectPath } from '../stores/workflowStore';
import { workspaceQueryKeys } from '../lib/queryKeys';

/**
 * Hook that returns workspace-aware query keys factory
 * All keys are memoized and update when projectPath changes
 *
 * @example
 * ```tsx
 * const queryKeys = useWorkspaceQueryKeys();
 * const { data } = useQuery({
 *   queryKey: queryKeys.sessionsList(),
 *   queryFn: fetchSessions,
 * });
 * ```
 */
export function useWorkspaceQueryKeys() {
  const projectPath = useWorkflowStore(selectProjectPath);

  // Memoize all key factory functions to recreate only when projectPath changes
  const keys = useMemo(() => {
    const pk = projectPath || '';

    return {
      // Base keys
      all: workspaceQueryKeys.all(pk),

      // Sessions
      sessionsList: workspaceQueryKeys.sessionsList(pk),
      sessionDetail: (sessionId: string) => workspaceQueryKeys.sessionDetail(pk, sessionId),

      // Tasks
      tasksList: (sessionId: string) => workspaceQueryKeys.tasksList(pk, sessionId),
      taskDetail: (taskId: string) => workspaceQueryKeys.taskDetail(pk, taskId),

      // Loops
      loopsList: workspaceQueryKeys.loopsList(pk),
      loopDetail: (loopId: string) => workspaceQueryKeys.loopDetail(pk, loopId),

      // Issues
      issuesList: workspaceQueryKeys.issuesList(pk),
      issuesHistory: workspaceQueryKeys.issuesHistory(pk),
      issueQueue: workspaceQueryKeys.issueQueue(pk),

      // Memory
      memoryList: workspaceQueryKeys.memoryList(pk),
      memoryDetail: (memoryId: string) => workspaceQueryKeys.memoryDetail(pk, memoryId),

      // Project Overview
      projectOverviewDetail: workspaceQueryKeys.projectOverviewDetail(pk),

      // Lite Tasks
      liteTasksList: (type?: 'lite-plan' | 'lite-fix' | 'multi-cli-plan') =>
        workspaceQueryKeys.liteTasksList(pk, type),
      liteTaskDetail: (sessionId: string) => workspaceQueryKeys.liteTaskDetail(pk, sessionId),

      // Review Sessions
      reviewSessionsList: workspaceQueryKeys.reviewSessionsList(pk),
      reviewSessionDetail: (sessionId: string) => workspaceQueryKeys.reviewSessionDetail(pk, sessionId),

      // Rules
      rulesList: workspaceQueryKeys.rulesList(pk),

      // Prompts
      promptsList: workspaceQueryKeys.promptsList(pk),
      promptsInsights: workspaceQueryKeys.promptsInsights(pk),

      // Index
      indexStatus: workspaceQueryKeys.indexStatus(pk),

      // File Explorer
      explorerTree: (rootPath?: string) => workspaceQueryKeys.explorerTree(pk, rootPath),
      explorerFile: (filePath?: string) => workspaceQueryKeys.explorerFile(pk, filePath),

      // Graph Explorer
      graphDependencies: (options?: { maxDepth?: number }) =>
        workspaceQueryKeys.graphDependencies(pk, options),
      graphImpact: (nodeId: string) => workspaceQueryKeys.graphImpact(pk, nodeId),

      // CLI History
      cliHistoryList: workspaceQueryKeys.cliHistoryList(pk),
      cliExecutionDetail: (executionId: string) =>
        workspaceQueryKeys.cliExecutionDetail(pk, executionId),
    };
  }, [projectPath]);

  return keys;
}

/**
 * Type for the return value of useWorkspaceQueryKeys
 */
export type WorkspaceQueryKeys = ReturnType<typeof useWorkspaceQueryKeys>;
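
The workspaceQueryKeys factory that useWorkspaceQueryKeys wraps lives in ccw/frontend/src/lib/queryKeys.ts and is not shown in this diff. A few representative entries of a plausible shape, inferred only from the call sites above and offered as an assumption rather than the actual implementation:

// Assumption: inferred from call sites such as sessionsList(pk), sessions(pk), skills(pk).
// The real '@/lib/queryKeys' implementation may differ.
export const workspaceQueryKeys = {
  all: (projectPath: string) => ['workspace', projectPath] as const,

  sessions: (projectPath: string) => ['workspace', projectPath, 'sessions'] as const,
  sessionsList: (projectPath: string) => ['workspace', projectPath, 'sessions', 'list'] as const,
  sessionDetail: (projectPath: string, sessionId: string) =>
    ['workspace', projectPath, 'sessions', 'detail', sessionId] as const,

  skills: (projectPath: string) => ['workspace', projectPath, 'skills'] as const,
  skillsList: (projectPath: string) => ['workspace', projectPath, 'skills', 'list'] as const,
};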