Mirror of https://github.com/catlog22/Claude-Code-Workflow.git, synced 2026-02-11 02:33:51 +08:00

feat: initialize monorepo with package.json for CCW workflow platform
ccw/frontend/src/hooks/__tests__/chartHooksIntegration.test.tsx (new file, 372 lines)
@@ -0,0 +1,372 @@
// ========================================
// Chart Hooks Integration Tests
// ========================================
// Integration tests for TanStack Query hooks: useWorkflowStatusCounts, useActivityTimeline, useTaskTypeCounts with workspace scoping

import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { renderHook, waitFor } from '@testing-library/react';
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import * as React from 'react';
import { useWorkflowStatusCounts } from '@/hooks/useWorkflowStatusCounts';
import { useActivityTimeline } from '@/hooks/useActivityTimeline';
import { useTaskTypeCounts } from '@/hooks/useTaskTypeCounts';

// Mock API
const mockApi = {
  get: vi.fn(),
};

vi.mock('@/lib/api', () => ({
  api: {
    get: (...args: any[]) => mockApi.get(...args),
  },
}));

describe('Chart Hooks Integration Tests', () => {
  let queryClient: QueryClient;

  const wrapper = ({ children }: { children: React.ReactNode }) => (
    <QueryClientProvider client={queryClient}>{children}</QueryClientProvider>
  );

  beforeEach(() => {
    queryClient = new QueryClient({
      defaultOptions: {
        queries: {
          retry: false,
          gcTime: 0,
        },
      },
    });

    mockApi.get.mockReset();
  });

  afterEach(() => {
    queryClient.clear();
  });

  describe('useWorkflowStatusCounts', () => {
    it('CHI-1.1 - should fetch workflow status counts successfully', async () => {
      const mockData = [
        { status: 'completed', count: 30, percentage: 60 },
        { status: 'in_progress', count: 10, percentage: 20 },
        { status: 'pending', count: 10, percentage: 20 },
      ];

      mockApi.get.mockResolvedValue({ data: mockData });

      const { result } = renderHook(() => useWorkflowStatusCounts(), { wrapper });

      await waitFor(() => {
        expect(result.current.isSuccess).toBe(true);
      });

      expect(result.current.data).toEqual(mockData);
      expect(mockApi.get).toHaveBeenCalledWith('/api/session-status-counts');
    });

    it('CHI-1.2 - should apply workspace scoping to query', async () => {
      const mockData = [{ status: 'completed', count: 5, percentage: 100 }];
      mockApi.get.mockResolvedValue({ data: mockData });

      const { result } = renderHook(
        () => useWorkflowStatusCounts({ projectPath: '/test/workspace' }),
        { wrapper }
      );

      await waitFor(() => {
        expect(result.current.isSuccess).toBe(true);
      });

      expect(mockApi.get).toHaveBeenCalledWith('/api/session-status-counts', {
        params: { workspace: '/test/workspace' },
      });
    });

    it('CHI-1.3 - should handle API errors gracefully', async () => {
      mockApi.get.mockRejectedValue(new Error('API Error'));

      const { result } = renderHook(() => useWorkflowStatusCounts(), { wrapper });

      await waitFor(() => {
        expect(result.current.isError).toBe(true);
      });

      expect(result.current.error).toBeDefined();
      expect(result.current.data).toBeUndefined();
    });

    it('CHI-1.4 - should cache results with TanStack Query', async () => {
      const mockData = [{ status: 'completed', count: 10, percentage: 100 }];
      mockApi.get.mockResolvedValue({ data: mockData });

      const { result: result1 } = renderHook(() => useWorkflowStatusCounts(), { wrapper });
      await waitFor(() => expect(result1.current.isSuccess).toBe(true));

      // Second render should use cache
      const { result: result2 } = renderHook(() => useWorkflowStatusCounts(), { wrapper });

      await waitFor(() => {
        expect(result2.current.isSuccess).toBe(true);
      });

      // API should only be called once (cached)
      expect(mockApi.get).toHaveBeenCalledTimes(1);
      expect(result2.current.data).toEqual(mockData);
    });

    it('CHI-1.5 - should support manual refetch', async () => {
      const mockData = [{ status: 'completed', count: 10, percentage: 100 }];
      mockApi.get.mockResolvedValue({ data: mockData });

      const { result } = renderHook(() => useWorkflowStatusCounts(), { wrapper });

      await waitFor(() => expect(result.current.isSuccess).toBe(true));

      // Refetch
      await result.current.refetch();

      expect(mockApi.get).toHaveBeenCalledTimes(2);
    });
  });

  describe('useActivityTimeline', () => {
    it('CHI-2.1 - should fetch activity timeline with default date range', async () => {
      const mockData = [
        { date: '2026-02-01', sessions: 5, tasks: 20 },
        { date: '2026-02-02', sessions: 8, tasks: 35 },
      ];

      mockApi.get.mockResolvedValue({ data: mockData });

      const { result } = renderHook(() => useActivityTimeline(), { wrapper });

      await waitFor(() => {
        expect(result.current.isSuccess).toBe(true);
      });

      expect(result.current.data).toEqual(mockData);
      expect(mockApi.get).toHaveBeenCalledWith('/api/activity-timeline');
    });

    it('CHI-2.2 - should accept custom date range parameters', async () => {
      const mockData = [{ date: '2026-01-01', sessions: 3, tasks: 10 }];
      mockApi.get.mockResolvedValue({ data: mockData });

      const dateRange = {
        start: new Date('2026-01-01'),
        end: new Date('2026-01-31'),
      };

      const { result } = renderHook(() => useActivityTimeline(dateRange), { wrapper });

      await waitFor(() => {
        expect(result.current.isSuccess).toBe(true);
      });

      expect(mockApi.get).toHaveBeenCalledWith('/api/activity-timeline', {
        params: {
          startDate: dateRange.start.toISOString(),
          endDate: dateRange.end.toISOString(),
        },
      });
    });

    it('CHI-2.3 - should handle empty timeline data', async () => {
      mockApi.get.mockResolvedValue({ data: [] });

      const { result } = renderHook(() => useActivityTimeline(), { wrapper });

      await waitFor(() => {
        expect(result.current.isSuccess).toBe(true);
      });

      expect(result.current.data).toEqual([]);
    });

    it('CHI-2.4 - should apply workspace scoping', async () => {
      const mockData = [{ date: '2026-02-01', sessions: 2, tasks: 8 }];
      mockApi.get.mockResolvedValue({ data: mockData });

      const { result } = renderHook(
        () => useActivityTimeline(undefined, '/test/workspace'),
        { wrapper }
      );

      await waitFor(() => {
        expect(result.current.isSuccess).toBe(true);
      });

      expect(mockApi.get).toHaveBeenCalledWith('/api/activity-timeline', {
        params: { workspace: '/test/workspace' },
      });
    });

    it('CHI-2.5 - should invalidate cache on workspace change', async () => {
      const mockData1 = [{ date: '2026-02-01', sessions: 5, tasks: 20 }];
      const mockData2 = [{ date: '2026-02-01', sessions: 3, tasks: 10 }];

      mockApi.get.mockResolvedValueOnce({ data: mockData1 });

      const { result, rerender } = renderHook(
        ({ workspace }: { workspace?: string }) => useActivityTimeline(undefined, workspace),
        { wrapper, initialProps: { workspace: '/workspace1' } }
      );

      await waitFor(() => expect(result.current.isSuccess).toBe(true));
      expect(result.current.data).toEqual(mockData1);

      // Change workspace
      mockApi.get.mockResolvedValueOnce({ data: mockData2 });
      rerender({ workspace: '/workspace2' });

      await waitFor(() => {
        expect(result.current.data).toEqual(mockData2);
      });

      expect(mockApi.get).toHaveBeenCalledTimes(2);
    });
  });

  describe('useTaskTypeCounts', () => {
    it('CHI-3.1 - should fetch task type counts successfully', async () => {
      const mockData = [
        { type: 'feature', count: 45 },
        { type: 'bugfix', count: 30 },
        { type: 'refactor', count: 15 },
      ];

      mockApi.get.mockResolvedValue({ data: mockData });

      const { result } = renderHook(() => useTaskTypeCounts(), { wrapper });

      await waitFor(() => {
        expect(result.current.isSuccess).toBe(true);
      });

      expect(result.current.data).toEqual(mockData);
      expect(mockApi.get).toHaveBeenCalledWith('/api/task-type-counts');
    });

    it('CHI-3.2 - should apply workspace scoping', async () => {
      const mockData = [{ type: 'feature', count: 10 }];
      mockApi.get.mockResolvedValue({ data: mockData });

      const { result } = renderHook(
        () => useTaskTypeCounts({ projectPath: '/test/workspace' }),
        { wrapper }
      );

      await waitFor(() => {
        expect(result.current.isSuccess).toBe(true);
      });

      expect(mockApi.get).toHaveBeenCalledWith('/api/task-type-counts', {
        params: { workspace: '/test/workspace' },
      });
    });

    it('CHI-3.3 - should handle zero counts', async () => {
      const mockData = [
        { type: 'feature', count: 0 },
        { type: 'bugfix', count: 0 },
      ];

      mockApi.get.mockResolvedValue({ data: mockData });

      const { result } = renderHook(() => useTaskTypeCounts(), { wrapper });

      await waitFor(() => {
        expect(result.current.isSuccess).toBe(true);
      });

      expect(result.current.data).toEqual(mockData);
    });

    it('CHI-3.4 - should support staleTime configuration', async () => {
      const mockData = [{ type: 'feature', count: 5 }];
      mockApi.get.mockResolvedValue({ data: mockData });

      const { result } = renderHook(
        () => useTaskTypeCounts({ staleTime: 30000 }),
        { wrapper }
      );

      await waitFor(() => {
        expect(result.current.isSuccess).toBe(true);
      });

      // Data should be fresh for 30s
      expect(result.current.isStale).toBe(false);
    });
  });

  describe('Multi-Hook Integration', () => {
    it('CHI-4.1 - should load all chart hooks concurrently', async () => {
      mockApi.get.mockImplementation((url: string) => {
        const data: Record<string, any> = {
          '/api/session-status-counts': [{ status: 'completed', count: 10, percentage: 100 }],
          '/api/activity-timeline': [{ date: '2026-02-01', sessions: 5, tasks: 20 }],
          '/api/task-type-counts': [{ type: 'feature', count: 15 }],
        };
        return Promise.resolve({ data: data[url] });
      });

      const { result: result1 } = renderHook(() => useWorkflowStatusCounts(), { wrapper });
      const { result: result2 } = renderHook(() => useActivityTimeline(), { wrapper });
      const { result: result3 } = renderHook(() => useTaskTypeCounts(), { wrapper });

      await waitFor(() => {
        expect(result1.current.isSuccess).toBe(true);
        expect(result2.current.isSuccess).toBe(true);
        expect(result3.current.isSuccess).toBe(true);
      });

      expect(mockApi.get).toHaveBeenCalledTimes(3);
    });

    it('CHI-4.2 - should handle partial failures gracefully', async () => {
      mockApi.get.mockImplementation((url: string) => {
        if (url === '/api/session-status-counts') {
          return Promise.reject(new Error('Failed'));
        }
        return Promise.resolve({
          data: url === '/api/activity-timeline'
            ? [{ date: '2026-02-01', sessions: 5, tasks: 20 }]
            : [{ type: 'feature', count: 15 }],
        });
      });

      const { result: result1 } = renderHook(() => useWorkflowStatusCounts(), { wrapper });
      const { result: result2 } = renderHook(() => useActivityTimeline(), { wrapper });
      const { result: result3 } = renderHook(() => useTaskTypeCounts(), { wrapper });

      await waitFor(() => {
        expect(result1.current.isError).toBe(true);
        expect(result2.current.isSuccess).toBe(true);
        expect(result3.current.isSuccess).toBe(true);
      });
    });

    it('CHI-4.3 - should share cache across multiple components', async () => {
      const mockData = [{ status: 'completed', count: 10, percentage: 100 }];
      mockApi.get.mockResolvedValue({ data: mockData });

      // First component
      const { result: result1 } = renderHook(() => useWorkflowStatusCounts(), { wrapper });
      await waitFor(() => expect(result1.current.isSuccess).toBe(true));

      // Second component should use cache
      const { result: result2 } = renderHook(() => useWorkflowStatusCounts(), { wrapper });

      await waitFor(() => {
        expect(result2.current.isSuccess).toBe(true);
      });

      // Only one API call
      expect(mockApi.get).toHaveBeenCalledTimes(1);
      expect(result1.current.data).toEqual(result2.current.data);
    });
  });
});
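The suite above mocks a shared `api` client from `@/lib/api` with an axios-style `get(url, config)` signature; that module is not part of this commit, so its exact shape is an assumption. A minimal sketch of a client that would satisfy the expectations in these tests could look like:

// Hypothetical sketch only: the real '@/lib/api' module is not shown in this diff.
export interface ApiGetConfig {
  params?: Record<string, string>;
}

export const api = {
  async get<T>(url: string, config: ApiGetConfig = {}): Promise<{ data: T }> {
    // Serialize optional query params, e.g. { workspace: '/test/workspace' }.
    const qs = config.params ? `?${new URLSearchParams(config.params)}` : '';
    const res = await fetch(`${url}${qs}`, { credentials: 'same-origin' });
    if (!res.ok) throw new Error(`Request failed: ${res.status}`);
    return { data: (await res.json()) as T };
  },
};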
ccw/frontend/src/hooks/useActivityTimeline.ts (new file, 157 lines)
@@ -0,0 +1,157 @@
// ========================================
// useActivityTimeline Hook
// ========================================
// TanStack Query hook for fetching activity timeline data

import { useQuery } from '@tanstack/react-query';
import { useWorkflowStore, selectProjectPath } from '@/stores/workflowStore';

/**
 * Activity timeline data point structure
 */
export interface ActivityTimelineData {
  date: string; // ISO date string (YYYY-MM-DD)
  sessions: number;
  tasks: number;
}

// Query key factory
export const activityTimelineKeys = {
  all: ['activityTimeline'] as const,
  detail: (projectPath: string, start: string, end: string) =>
    [...activityTimelineKeys.all, 'detail', projectPath, start, end] as const,
};

// Default stale time: 30 seconds
const STALE_TIME = 30 * 1000;

export interface DateRange {
  start: Date;
  end: Date;
}

export interface UseActivityTimelineOptions {
  /** Date range for the timeline (default: last 7 days) */
  dateRange?: DateRange;
  /** Override default stale time (ms) */
  staleTime?: number;
  /** Enable/disable the query */
  enabled?: boolean;
  /** Refetch interval (ms), 0 to disable */
  refetchInterval?: number;
}

export interface UseActivityTimelineReturn {
  /** Activity timeline data */
  data: ActivityTimelineData[] | undefined;
  /** Loading state for initial fetch */
  isLoading: boolean;
  /** Fetching state (initial or refetch) */
  isFetching: boolean;
  /** Error object if query failed */
  error: Error | null;
  /** Whether data is stale */
  isStale: boolean;
  /** Manually refetch data */
  refetch: () => Promise<void>;
}

/**
 * Get default date range (last 7 days)
 */
function getDefaultDateRange(): DateRange {
  const end = new Date();
  const start = new Date();
  start.setDate(start.getDate() - 7);
  return { start, end };
}

/**
 * Format date to ISO date string (YYYY-MM-DD)
 */
function formatDate(date: Date): string {
  return date.toISOString().split('T')[0];
}

/**
 * Hook for fetching activity timeline data
 *
 * @example
 * ```tsx
 * const { data, isLoading, error } = useActivityTimeline();
 *
 * if (isLoading) return <ChartSkeleton />;
 * if (error) return <ErrorMessage error={error} />;
 *
 * return <ActivityLineChart data={data} />;
 * ```
 */
export function useActivityTimeline(
  options: UseActivityTimelineOptions = {}
): UseActivityTimelineReturn {
  const {
    dateRange = getDefaultDateRange(),
    staleTime = STALE_TIME,
    enabled = true,
    refetchInterval = 0,
  } = options;
  const projectPath = useWorkflowStore(selectProjectPath);

  const startStr = formatDate(dateRange.start);
  const endStr = formatDate(dateRange.end);

  // Only enable query when projectPath is available
  const queryEnabled = enabled && !!projectPath;

  const query = useQuery({
    queryKey: activityTimelineKeys.detail(projectPath || '', startStr, endStr),
    queryFn: async () => {
      if (!projectPath) throw new Error('Project path is required');

      // TODO: Replace with actual API endpoint once backend is ready
      const response = await fetch(
        `/api/activity-timeline?projectPath=${encodeURIComponent(projectPath)}&start=${startStr}&end=${endStr}`
      );
      if (!response.ok) throw new Error('Failed to fetch activity timeline');
      return response.json() as Promise<ActivityTimelineData[]>;
    },
    staleTime,
    enabled: queryEnabled,
    refetchInterval: refetchInterval > 0 ? refetchInterval : false,
    retry: 2,
    retryDelay: (attemptIndex) => Math.min(1000 * 2 ** attemptIndex, 10000),
  });

  const refetch = async () => {
    await query.refetch();
  };

  return {
    data: query.data,
    isLoading: query.isLoading,
    isFetching: query.isFetching,
    error: query.error,
    isStale: query.isStale,
    refetch,
  };
}

/**
 * Mock data generator for development/testing
 */
export function generateMockActivityTimeline(days: number = 7): ActivityTimelineData[] {
  const data: ActivityTimelineData[] = [];
  const today = new Date();

  for (let i = days - 1; i >= 0; i--) {
    const date = new Date(today);
    date.setDate(date.getDate() - i);
    data.push({
      date: formatDate(date),
      sessions: Math.floor(Math.random() * 10) + 1,
      tasks: Math.floor(Math.random() * 25) + 5,
    });
  }

  return data;
}
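A short usage sketch under stated assumptions: the chart component names mirror the placeholders in the JSDoc example above and are illustrative, and the mock fallback leans on the exported generator while the backend endpoint remains a TODO.

// Illustrative sketch; ActivityLineChart / ChartSkeleton are placeholder component names.
const dateRange = { start: new Date('2026-01-01'), end: new Date('2026-01-31') };
const { data, isLoading } = useActivityTimeline({ dateRange, refetchInterval: 60_000 });

if (isLoading) return <ChartSkeleton />;
// Fall back to generated data during development while the API endpoint is still a TODO.
return <ActivityLineChart data={data ?? generateMockActivityTimeline(30)} />;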
@@ -4,6 +4,9 @@
// TanStack Query hooks for API Settings management

import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
import { useFormatMessage } from '../hooks/useLocale';
import { useNotifications } from '../hooks/useNotifications';
import { sanitizeErrorMessage } from '../utils/errorSanitizer';
import {
  fetchProviders,
  createProvider,
@@ -120,12 +123,30 @@ export function useProviders(options: UseProvidersOptions = {}): UseProvidersRet

export function useCreateProvider() {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: (provider: Omit<ProviderCredential, 'id' | 'createdAt' | 'updatedAt'>) =>
      createProvider(provider),
    onMutate: () => {
      info(
        formatMessage({ id: 'status.creating' }),
        formatMessage({ id: 'common.feedback.providerCreate.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: apiSettingsKeys.providers() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.providerCreate.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'providerCreate');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

@@ -138,12 +159,30 @@ export function useCreateProvider() {

export function useUpdateProvider() {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: ({ providerId, updates }: { providerId: string; updates: Partial<Omit<ProviderCredential, 'id' | 'createdAt' | 'updatedAt'>> }) =>
      updateProvider(providerId, updates),
    onMutate: () => {
      info(
        formatMessage({ id: 'status.inProgress' }),
        formatMessage({ id: 'common.feedback.providerUpdate.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: apiSettingsKeys.providers() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.providerUpdate.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'providerUpdate');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

@@ -157,11 +196,29 @@ export function useUpdateProvider() {

export function useDeleteProvider() {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: (providerId: string) => deleteProvider(providerId),
    onMutate: () => {
      info(
        formatMessage({ id: 'status.deleting' }),
        formatMessage({ id: 'common.feedback.providerDelete.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: apiSettingsKeys.providers() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.providerDelete.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'providerDelete');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });
@@ -4,6 +4,9 @@
// TanStack Query hooks for CLI endpoint management

import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
import { useFormatMessage } from '../hooks/useLocale';
import { useNotifications } from '../hooks/useNotifications';
import { sanitizeErrorMessage } from '../utils/errorSanitizer';
import {
  fetchCliEndpoints,
  toggleCliEndpoint,
@@ -190,9 +193,30 @@ export function useCliInstallations(options: UseCliInstallationsOptions = {}): U

export function useInstallCliTool() {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: (toolName: string) => installCliTool(toolName),
    onMutate: () => {
      info(
        formatMessage({ id: 'status.inProgress' }),
        formatMessage({ id: 'common.feedback.cliToolInstall.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: cliInstallationsKeys.all });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.cliToolInstall.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'cliToolInstall');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
    onSettled: () => {
      queryClient.invalidateQueries({ queryKey: cliInstallationsKeys.all });
    },
@@ -207,9 +231,30 @@ export function useInstallCliTool() {

export function useUninstallCliTool() {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: (toolName: string) => uninstallCliTool(toolName),
    onMutate: () => {
      info(
        formatMessage({ id: 'status.inProgress' }),
        formatMessage({ id: 'common.feedback.cliToolUninstall.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: cliInstallationsKeys.all });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.cliToolUninstall.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'cliToolUninstall');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
    onSettled: () => {
      queryClient.invalidateQueries({ queryKey: cliInstallationsKeys.all });
    },
@@ -224,9 +269,30 @@ export function useUninstallCliTool() {

export function useUpgradeCliTool() {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: (toolName: string) => upgradeCliTool(toolName),
    onMutate: () => {
      info(
        formatMessage({ id: 'status.inProgress' }),
        formatMessage({ id: 'common.feedback.cliToolUpgrade.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: cliInstallationsKeys.all });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.cliToolUpgrade.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'cliToolUpgrade');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
    onSettled: () => {
      queryClient.invalidateQueries({ queryKey: cliInstallationsKeys.all });
    },
@@ -4,6 +4,9 @@
// TanStack Query hooks for CodexLens management

import { useQuery, useMutation, useQueryClient } from '@tanstack/react-query';
import { useFormatMessage } from '../hooks/useLocale';
import { useNotifications } from '../hooks/useNotifications';
import { sanitizeErrorMessage } from '../utils/errorSanitizer';
import {
  fetchCodexLensDashboardInit,
  fetchCodexLensStatus,
@@ -513,12 +516,30 @@ export interface UseUpdateCodexLensConfigReturn {
 */
export function useUpdateCodexLensConfig(): UseUpdateCodexLensConfigReturn {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: updateCodexLensConfig,
    onMutate: () => {
      info(
        formatMessage({ id: 'status.inProgress' }),
        formatMessage({ id: 'common.feedback.codexLensConfigUpdate.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: codexLensKeys.config() });
      queryClient.invalidateQueries({ queryKey: codexLensKeys.dashboard() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.codexLensConfigUpdate.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'codexLensConfigUpdate');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

@@ -540,11 +561,29 @@ export interface UseBootstrapCodexLensReturn {
 */
export function useBootstrapCodexLens(): UseBootstrapCodexLensReturn {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: bootstrapCodexLens,
    onMutate: () => {
      info(
        formatMessage({ id: 'codexlens.bootstrapping' }),
        formatMessage({ id: 'common.feedback.codexLensBootstrap.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: codexLensKeys.all });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.codexLensBootstrap.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'codexLensBootstrap');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

@@ -566,12 +605,30 @@ export interface UseInstallSemanticReturn {
 */
export function useInstallSemantic(): UseInstallSemanticReturn {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: installCodexLensSemantic,
    onMutate: () => {
      info(
        formatMessage({ id: 'codexlens.semantic.installing' }),
        formatMessage({ id: 'common.feedback.codexLensInstallSemantic.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: codexLensKeys.all });
      queryClient.invalidateQueries({ queryKey: codexLensKeys.dashboard() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.codexLensInstallSemantic.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'codexLensInstallSemantic');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

@@ -593,11 +650,29 @@ export interface UseUninstallCodexLensReturn {
 */
export function useUninstallCodexLens(): UseUninstallCodexLensReturn {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: uninstallCodexLens,
    onMutate: () => {
      info(
        formatMessage({ id: 'codexlens.uninstalling' }),
        formatMessage({ id: 'common.feedback.codexLensUninstall.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: codexLensKeys.all });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.codexLensUninstall.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'codexLensUninstall');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

@@ -620,6 +695,8 @@ export interface UseDownloadModelReturn {
 */
export function useDownloadModel(): UseDownloadModelReturn {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: async ({ profile, modelName, modelType }: { profile?: string; modelName?: string; modelType?: string }) => {
@@ -627,8 +704,24 @@ export function useDownloadModel(): UseDownloadModelReturn {
      if (modelName) return downloadCodexLensCustomModel(modelName, modelType);
      throw new Error('Either profile or modelName must be provided');
    },
    onMutate: () => {
      info(
        formatMessage({ id: 'codexlens.models.downloading' }),
        formatMessage({ id: 'common.feedback.codexLensDownloadModel.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: codexLensKeys.models() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.codexLensDownloadModel.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'codexLensDownloadModel');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

@@ -652,6 +745,8 @@ export interface UseDeleteModelReturn {
 */
export function useDeleteModel(): UseDeleteModelReturn {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: async ({ profile, cachePath }: { profile?: string; cachePath?: string }) => {
@@ -659,8 +754,24 @@ export function useDeleteModel(): UseDeleteModelReturn {
      if (cachePath) return deleteCodexLensModelByPath(cachePath);
      throw new Error('Either profile or cachePath must be provided');
    },
    onMutate: () => {
      info(
        formatMessage({ id: 'status.deleting' }),
        formatMessage({ id: 'common.feedback.codexLensDeleteModel.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: codexLensKeys.models() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.codexLensDeleteModel.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'codexLensDeleteModel');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

@@ -683,12 +794,30 @@ export interface UseUpdateCodexLensEnvReturn {
 */
export function useUpdateCodexLensEnv(): UseUpdateCodexLensEnvReturn {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: (request: CodexLensUpdateEnvRequest) => updateCodexLensEnv(request),
    onMutate: () => {
      info(
        formatMessage({ id: 'status.inProgress' }),
        formatMessage({ id: 'common.feedback.codexLensUpdateEnv.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: codexLensKeys.env() });
      queryClient.invalidateQueries({ queryKey: codexLensKeys.dashboard() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.codexLensUpdateEnv.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'codexLensUpdateEnv');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

@@ -712,18 +841,52 @@ export interface UseSelectGpuReturn {
 */
export function useSelectGpu(): UseSelectGpuReturn {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const selectMutation = useMutation({
    mutationFn: (deviceId: string | number) => selectCodexLensGpu(deviceId),
    onMutate: () => {
      info(
        formatMessage({ id: 'status.inProgress' }),
        formatMessage({ id: 'common.feedback.codexLensSelectGpu.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: codexLensKeys.gpu() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.codexLensSelectGpu.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'codexLensSelectGpu');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

  const resetMutation = useMutation({
    mutationFn: () => resetCodexLensGpu(),
    onMutate: () => {
      info(
        formatMessage({ id: 'status.inProgress' }),
        formatMessage({ id: 'common.feedback.codexLensResetGpu.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: codexLensKeys.gpu() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.codexLensResetGpu.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'codexLensResetGpu');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

@@ -747,11 +910,29 @@ export interface UseUpdateIgnorePatternsReturn {
 */
export function useUpdateIgnorePatterns(): UseUpdateIgnorePatternsReturn {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: updateCodexLensIgnorePatterns,
    onMutate: () => {
      info(
        formatMessage({ id: 'status.inProgress' }),
        formatMessage({ id: 'common.feedback.codexLensUpdatePatterns.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: codexLensKeys.ignorePatterns() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.codexLensUpdatePatterns.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'codexLensUpdatePatterns');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

@@ -847,6 +1028,8 @@ export interface UseRebuildIndexReturn {
 */
export function useRebuildIndex(): UseRebuildIndexReturn {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: async ({
@@ -861,9 +1044,25 @@ export function useRebuildIndex(): UseRebuildIndexReturn {
        maxWorkers?: number;
      };
    }) => rebuildCodexLensIndex(projectPath, options),
    onMutate: () => {
      info(
        formatMessage({ id: 'status.inProgress' }),
        formatMessage({ id: 'common.feedback.codexLensRebuildIndex.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: codexLensKeys.indexes() });
      queryClient.invalidateQueries({ queryKey: codexLensKeys.dashboard() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.codexLensRebuildIndex.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'codexLensRebuildIndex');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

@@ -891,6 +1090,8 @@ export interface UseUpdateIndexReturn {
 */
export function useUpdateIndex(): UseUpdateIndexReturn {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: async ({
@@ -905,9 +1106,25 @@ export function useUpdateIndex(): UseUpdateIndexReturn {
        maxWorkers?: number;
      };
    }) => updateCodexLensIndex(projectPath, options),
    onMutate: () => {
      info(
        formatMessage({ id: 'status.inProgress' }),
        formatMessage({ id: 'common.feedback.codexLensUpdateIndex.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: codexLensKeys.indexes() });
      queryClient.invalidateQueries({ queryKey: codexLensKeys.dashboard() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.codexLensUpdateIndex.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'codexLensUpdateIndex');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });

@@ -930,11 +1147,29 @@ export interface UseCancelIndexingReturn {
 */
export function useCancelIndexing(): UseCancelIndexingReturn {
  const queryClient = useQueryClient();
  const formatMessage = useFormatMessage();
  const { success, info, error: errorToast } = useNotifications();

  const mutation = useMutation({
    mutationFn: cancelCodexLensIndexing,
    onMutate: () => {
      info(
        formatMessage({ id: 'status.inProgress' }),
        formatMessage({ id: 'common.feedback.codexLensCancelIndexing.success' })
      );
    },
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: codexLensKeys.indexingStatus() });
      success(
        formatMessage({ id: 'common.success' }),
        formatMessage({ id: 'common.feedback.codexLensCancelIndexing.success' })
      );
    },
    onError: (err) => {
      const sanitized = sanitizeErrorMessage(err, 'codexLensCancelIndexing');
      const message = formatMessage({ id: sanitized.messageKey });
      const title = formatMessage({ id: 'common.error' });
      errorToast(title, message);
    },
  });
@@ -11,6 +11,9 @@ import {
  type Command,
} from '../lib/api';
import { useWorkflowStore, selectProjectPath } from '@/stores/workflowStore';
import { useNotifications } from './useNotifications';
import { sanitizeErrorMessage } from '@/utils/errorSanitizer';
import { formatMessage } from '@/lib/i18n';

// Query key factory
export const commandsKeys = {
@@ -66,21 +69,48 @@ export interface UseCommandMutationsReturn {
export function useCommandMutations(): UseCommandMutationsReturn {
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);
  const { addToast, removeToast, success, error } = useNotifications();

  const toggleMutation = useMutation({
    mutationFn: ({ name, enabled, location }: { name: string; enabled: boolean; location: 'project' | 'user' }) =>
      toggleCommandApi(name, enabled, location, projectPath),
    onSuccess: () => {
    onMutate: (): { loadingId: string } => {
      const loadingId = addToast('info', formatMessage('common.loading'), undefined, { duration: 0 });
      return { loadingId };
    },
    onSuccess: (_, __, context) => {
      const { loadingId } = context ?? { loadingId: '' };
      if (loadingId) removeToast(loadingId);
      success(formatMessage('feedback.commandToggle.success'));
      queryClient.invalidateQueries({ queryKey: commandsKeys.all });
    },
    onError: (err, __, context) => {
      const { loadingId } = context ?? { loadingId: '' };
      if (loadingId) removeToast(loadingId);
      const sanitized = sanitizeErrorMessage(err, 'commandToggle');
      error(formatMessage('common.error'), formatMessage(sanitized.messageKey));
    },
  });

  const toggleGroupMutation = useMutation({
    mutationFn: ({ groupName, enable, location }: { groupName: string; enable: boolean; location: 'project' | 'user' }) =>
      toggleCommandGroupApi(groupName, enable, location, projectPath),
    onSuccess: () => {
    onMutate: (): { loadingId: string } => {
      const loadingId = addToast('info', formatMessage('common.loading'), undefined, { duration: 0 });
      return { loadingId };
    },
    onSuccess: (_, __, context) => {
      const { loadingId } = context ?? { loadingId: '' };
      if (loadingId) removeToast(loadingId);
      success(formatMessage('feedback.commandToggle.success'));
      queryClient.invalidateQueries({ queryKey: commandsKeys.all });
    },
    onError: (err, __, context) => {
      const { loadingId } = context ?? { loadingId: '' };
      if (loadingId) removeToast(loadingId);
      const sanitized = sanitizeErrorMessage(err, 'commandToggle');
      error(formatMessage('common.error'), formatMessage(sanitized.messageKey));
    },
  });

  return {
@@ -9,6 +9,8 @@ import {
  createMemory,
  updateMemory,
  deleteMemory,
  archiveMemory as archiveMemoryApi,
  unarchiveMemory as unarchiveMemoryApi,
  type CoreMemory,
} from '../lib/api';
import { useWorkflowStore, selectProjectPath } from '@/stores/workflowStore';
@@ -234,11 +236,7 @@ export function useArchiveMemory(): UseArchiveMemoryReturn {
  const projectPath = useWorkflowStore(selectProjectPath);

  const mutation = useMutation({
    mutationFn: (memoryId: string) =>
      fetch(`/api/core-memory/memories/${encodeURIComponent(memoryId)}/archive?path=${encodeURIComponent(projectPath)}`, {
        method: 'POST',
        credentials: 'same-origin',
      }).then(res => res.json()),
    mutationFn: (memoryId: string) => archiveMemoryApi(memoryId, projectPath),
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: projectPath ? workspaceQueryKeys.memory(projectPath) : ['memory'] });
    },
@@ -262,13 +260,7 @@ export function useUnarchiveMemory(): UseUnarchiveMemoryReturn {
  const projectPath = useWorkflowStore(selectProjectPath);

  const mutation = useMutation({
    mutationFn: (memoryId: string) =>
      fetch(`/api/core-memory/memories?path=${encodeURIComponent(projectPath)}`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        credentials: 'same-origin',
        body: JSON.stringify({ id: memoryId, archived: false }),
      }).then(res => res.json()),
    mutationFn: (memoryId: string) => unarchiveMemoryApi(memoryId, projectPath),
    onSuccess: () => {
      queryClient.invalidateQueries({ queryKey: projectPath ? workspaceQueryKeys.memory(projectPath) : ['memory'] });
    },
ccw/frontend/src/hooks/useRealtimeUpdates.ts (new file, 172 lines)
@@ -0,0 +1,172 @@
// ========================================
// useRealtimeUpdates Hook
// ========================================
// WebSocket hook for real-time ticker messages with typed handling and reconnection

import { useState, useEffect, useRef, useCallback } from 'react';
import { z } from 'zod';

// --- Types ---

export const TickerMessageSchema = z.object({
  id: z.string(),
  text: z.string(),
  type: z.enum(['session', 'task', 'workflow', 'status']),
  link: z.string().optional(),
  timestamp: z.number(),
});

export type TickerMessage = z.infer<typeof TickerMessageSchema>;

export type ConnectionStatus = 'connecting' | 'connected' | 'disconnected' | 'reconnecting';

export interface RealtimeUpdatesResult {
  messages: TickerMessage[];
  connectionStatus: ConnectionStatus;
  reconnect: () => void;
}

// --- Constants ---

const RECONNECT_DELAY_BASE = 1000;
const RECONNECT_DELAY_MAX = 30000;
const RECONNECT_DELAY_MULTIPLIER = 1.5;
const MAX_MESSAGES = 50;
const MESSAGE_BATCH_DELAY = 500; // Batch messages every 500ms for performance

// --- Hook ---

export function useRealtimeUpdates(endpoint: string): RealtimeUpdatesResult {
  const [messages, setMessages] = useState<TickerMessage[]>([]);
  const [connectionStatus, setConnectionStatus] = useState<ConnectionStatus>('disconnected');

  const wsRef = useRef<WebSocket | null>(null);
  const reconnectTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);
  const reconnectDelayRef = useRef(RECONNECT_DELAY_BASE);

  // Message batching for performance: accumulate messages and flush every 500ms
  const messageBatchRef = useRef<TickerMessage[]>([]);
  const batchFlushTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>(null);

  // Flush batched messages to state
  const flushMessageBatch = useCallback(() => {
    if (messageBatchRef.current.length > 0) {
      const batch = [...messageBatchRef.current];
      messageBatchRef.current = [];

      setMessages((prev) => {
        const next = [...batch, ...prev];
        return next.length > MAX_MESSAGES ? next.slice(0, MAX_MESSAGES) : next;
      });
    }

    if (batchFlushTimeoutRef.current) {
      clearTimeout(batchFlushTimeoutRef.current);
      batchFlushTimeoutRef.current = null;
    }
  }, []);

  // Schedule a batch flush
  const scheduleBatchFlush = useCallback(() => {
    if (!batchFlushTimeoutRef.current) {
      batchFlushTimeoutRef.current = setTimeout(() => {
        flushMessageBatch();
      }, MESSAGE_BATCH_DELAY);
    }
  }, [flushMessageBatch]);

  const scheduleReconnect = useCallback(() => {
    if (reconnectTimeoutRef.current) {
      clearTimeout(reconnectTimeoutRef.current);
    }

    const delay = reconnectDelayRef.current;
    setConnectionStatus('reconnecting');

    reconnectTimeoutRef.current = setTimeout(() => {
      connectWs();
    }, delay);

    reconnectDelayRef.current = Math.min(
      reconnectDelayRef.current * RECONNECT_DELAY_MULTIPLIER,
      RECONNECT_DELAY_MAX
    );
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  const connectWs = useCallback(() => {
    const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:';
    const wsUrl = `${protocol}//${window.location.host}/${endpoint}`;

    try {
      setConnectionStatus('connecting');
      const ws = new WebSocket(wsUrl);
      wsRef.current = ws;

      ws.onopen = () => {
        setConnectionStatus('connected');
        reconnectDelayRef.current = RECONNECT_DELAY_BASE;
      };

      ws.onmessage = (event: MessageEvent) => {
        try {
          const data = JSON.parse(event.data);
          const parsed = TickerMessageSchema.safeParse(data);
          if (parsed.success) {
            // Add to batch instead of immediate state update
            messageBatchRef.current.push(parsed.data);
            // Schedule flush (debounced - only one timer active at a time)
            scheduleBatchFlush();
          }
        } catch {
          // Ignore malformed messages
        }
      };

      ws.onclose = () => {
        setConnectionStatus('disconnected');
        wsRef.current = null;
        scheduleReconnect();
      };

      ws.onerror = () => {
        setConnectionStatus('disconnected');
      };
    } catch {
      setConnectionStatus('disconnected');
      scheduleReconnect();
    }
  }, [endpoint, scheduleReconnect]);

  const reconnect = useCallback(() => {
    if (wsRef.current) {
      wsRef.current.close();
    }
    reconnectDelayRef.current = RECONNECT_DELAY_BASE;
    connectWs();
  }, [connectWs]);

  useEffect(() => {
    connectWs();

    return () => {
      // Flush any remaining batched messages
      flushMessageBatch();

      if (reconnectTimeoutRef.current) {
        clearTimeout(reconnectTimeoutRef.current);
      }
      if (batchFlushTimeoutRef.current) {
        clearTimeout(batchFlushTimeoutRef.current);
      }
      if (wsRef.current) {
        wsRef.current.close();
        wsRef.current = null;
      }
    };
  }, [connectWs, flushMessageBatch]);

  return { messages, connectionStatus, reconnect };
}

export default useRealtimeUpdates;
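The hook takes a WebSocket path relative to the current host and returns the batched message list, the connection status, and a manual reconnect callback. A minimal consumer sketch, with the 'ws/ticker' endpoint and the Ticker markup being assumptions rather than anything defined in this commit:

// Illustrative only: the actual ticker endpoint and UI are not part of this diff.
function TickerBar() {
  const { messages, connectionStatus, reconnect } = useRealtimeUpdates('ws/ticker');

  return (
    <div>
      <span>{connectionStatus}</span>
      {connectionStatus === 'disconnected' && <button onClick={reconnect}>Reconnect</button>}
      <ul>
        {messages.map((m) => (
          <li key={m.id}>{m.text}</li>
        ))}
      </ul>
    </div>
  );
}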
@@ -13,6 +13,9 @@ import {
} from '../lib/api';
import { useWorkflowStore, selectProjectPath } from '@/stores/workflowStore';
import { workspaceQueryKeys } from '@/lib/queryKeys';
import { useNotifications } from './useNotifications';
import { sanitizeErrorMessage } from '@/utils/errorSanitizer';
import { formatMessage } from '@/lib/i18n';

// Query key factory
export const skillsKeys = {
@@ -162,16 +165,34 @@ export interface UseToggleSkillReturn {
export function useToggleSkill(): UseToggleSkillReturn {
  const queryClient = useQueryClient();
  const projectPath = useWorkflowStore(selectProjectPath);
  const { addToast, removeToast, success, error } = useNotifications();

  const mutation = useMutation({
    mutationFn: ({ skillName, enabled, location }: { skillName: string; enabled: boolean; location: 'project' | 'user' }) =>
      enabled
        ? enableSkill(skillName, location, projectPath)
        : disableSkill(skillName, location, projectPath),
    onSuccess: () => {
      // Invalidate to ensure sync with server
    onMutate: (): { loadingId: string } => {
      const loadingId = addToast('info', formatMessage('common.loading'), undefined, { duration: 0 });
      return { loadingId };
    },
    onSuccess: (_, variables, context) => {
      const { loadingId } = context ?? { loadingId: '' };
      if (loadingId) removeToast(loadingId);

      const operation = variables.enabled ? 'skillEnable' : 'skillDisable';
      success(formatMessage(`feedback.${operation}.success`));

      queryClient.invalidateQueries({ queryKey: projectPath ? workspaceQueryKeys.skills(projectPath) : ['skills'] });
    },
    onError: (err, variables, context) => {
      const { loadingId } = context ?? { loadingId: '' };
      if (loadingId) removeToast(loadingId);

      const operation = variables.enabled ? 'skillEnable' : 'skillDisable';
      const sanitized = sanitizeErrorMessage(err, operation);
      error(formatMessage('common.error'), formatMessage(sanitized.messageKey));
    },
  });

  return {
ccw/frontend/src/hooks/useTaskTypeCounts.ts (new file, 116 lines)
@@ -0,0 +1,116 @@
// ========================================
// useTaskTypeCounts Hook
// ========================================
// TanStack Query hook for fetching task type breakdown

import { useQuery } from '@tanstack/react-query';
import { useWorkflowStore, selectProjectPath } from '@/stores/workflowStore';

/**
 * Task type count data structure
 */
export interface TaskTypeCount {
  type: string;
  count: number;
  percentage?: number;
}

// Query key factory
export const taskTypeCountKeys = {
  all: ['taskTypeCounts'] as const,
  detail: (projectPath: string) => [...taskTypeCountKeys.all, 'detail', projectPath] as const,
};

// Default stale time: 30 seconds
const STALE_TIME = 30 * 1000;

export interface UseTaskTypeCountsOptions {
  /** Override default stale time (ms) */
  staleTime?: number;
  /** Enable/disable the query */
  enabled?: boolean;
  /** Refetch interval (ms), 0 to disable */
  refetchInterval?: number;
}

export interface UseTaskTypeCountsReturn {
  /** Task type count data */
  data: TaskTypeCount[] | undefined;
  /** Loading state for initial fetch */
  isLoading: boolean;
  /** Fetching state (initial or refetch) */
  isFetching: boolean;
  /** Error object if query failed */
  error: Error | null;
  /** Whether data is stale */
  isStale: boolean;
  /** Manually refetch data */
  refetch: () => Promise<void>;
}

/**
 * Hook for fetching task type breakdown
 *
 * @example
 * ```tsx
 * const { data, isLoading, error } = useTaskTypeCounts();
 *
 * if (isLoading) return <ChartSkeleton />;
 * if (error) return <ErrorMessage error={error} />;
 *
 * return <TaskTypeBarChart data={data} />;
 * ```
 */
export function useTaskTypeCounts(
  options: UseTaskTypeCountsOptions = {}
): UseTaskTypeCountsReturn {
  const { staleTime = STALE_TIME, enabled = true, refetchInterval = 0 } = options;
  const projectPath = useWorkflowStore(selectProjectPath);

  // Only enable query when projectPath is available
  const queryEnabled = enabled && !!projectPath;

  const query = useQuery({
    queryKey: taskTypeCountKeys.detail(projectPath || ''),
    queryFn: async () => {
      if (!projectPath) throw new Error('Project path is required');

      // TODO: Replace with actual API endpoint once backend is ready
      const response = await fetch(`/api/task-type-counts?projectPath=${encodeURIComponent(projectPath)}`);
      if (!response.ok) throw new Error('Failed to fetch task type counts');
      return response.json() as Promise<TaskTypeCount[]>;
    },
    staleTime,
    enabled: queryEnabled,
    refetchInterval: refetchInterval > 0 ? refetchInterval : false,
    retry: 2,
    retryDelay: (attemptIndex) => Math.min(1000 * 2 ** attemptIndex, 10000),
  });

  const refetch = async () => {
    await query.refetch();
  };

  return {
    data: query.data,
    isLoading: query.isLoading,
    isFetching: query.isFetching,
    error: query.error,
    isStale: query.isStale,
    refetch,
  };
}

/**
 * Mock data generator for development/testing
 */
export function generateMockTaskTypeCounts(): TaskTypeCount[] {
  return [
    { type: 'implementation', count: 35, percentage: 35 },
    { type: 'bugfix', count: 25, percentage: 25 },
    { type: 'refactor', count: 18, percentage: 18 },
    { type: 'documentation', count: 12, percentage: 12 },
    { type: 'testing', count: 7, percentage: 7 },
    { type: 'other', count: 3, percentage: 3 },
  ];
}
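A minimal consumer sketch for `useTaskTypeCounts`, falling back to the exported mock generator while the `/api/task-type-counts` endpoint is still marked TODO. The rendering here is illustrative; the `TaskTypeBarChart` named in the doc comment is not part of this excerpt, so a plain list is used instead.

```tsx
// Sketch only: renders a simple list instead of the real chart component.
import { useTaskTypeCounts, generateMockTaskTypeCounts } from '@/hooks/useTaskTypeCounts';

export function TaskTypeBreakdown() {
  const { data, isLoading, error } = useTaskTypeCounts({ refetchInterval: 60_000 });

  if (isLoading) return <p>Loading task types…</p>;
  if (error) return <p>Failed to load task types: {error.message}</p>;

  // Fall back to mock data while the backend endpoint is not implemented yet.
  const counts = data ?? generateMockTaskTypeCounts();

  return (
    <ul>
      {counts.map((item) => (
        <li key={item.type}>
          {item.type}: {item.count} ({item.percentage ?? 0}%)
        </li>
      ))}
    </ul>
  );
}
```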
177
ccw/frontend/src/hooks/useUserDashboardLayout.ts
Normal file
@@ -0,0 +1,177 @@
// ========================================
// useUserDashboardLayout Hook
// ========================================
// Hook for managing user's dashboard layout with localStorage persistence

import { useEffect, useCallback, useRef } from 'react';
import { useAppStore } from '@/stores/appStore';
import { useLocalStorage } from './useLocalStorage';
import type { DashboardLayouts, WidgetConfig } from '@/types/store';
import { DEFAULT_DASHBOARD_LAYOUT } from '@/components/dashboard/defaultLayouts';

const DEBOUNCE_DELAY = 1000; // 1 second debounce for layout saves
const STORAGE_KEY = 'ccw-dashboard-layout';

export interface UseUserDashboardLayoutResult {
  /** Current dashboard layouts */
  layouts: DashboardLayouts;
  /** Current widget configurations */
  widgets: WidgetConfig[];
  /** Update layouts (debounced) */
  updateLayouts: (newLayouts: DashboardLayouts) => void;
  /** Update widgets configuration */
  updateWidgets: (newWidgets: WidgetConfig[]) => void;
  /** Reset to default layout */
  resetLayout: () => void;
  /** Whether layout is being saved */
  isSaving: boolean;
}

/**
 * Hook for managing dashboard layout with localStorage and Zustand persistence
 *
 * Features:
 * - Loads layout from Zustand store (persisted to localStorage via Zustand)
 * - Debounced layout updates (1s delay)
 * - Reset to default layout
 * - Additional localStorage backup for redundancy
 *
 * @example
 * ```tsx
 * const { layouts, updateLayouts, resetLayout } = useUserDashboardLayout();
 *
 * const handleLayoutChange = (newLayouts) => {
 *   updateLayouts(newLayouts);
 * };
 * ```
 */
export function useUserDashboardLayout(): UseUserDashboardLayoutResult {
  // Get layout from Zustand store
  const dashboardLayout = useAppStore((state) => state.dashboardLayout);
  const setDashboardLayouts = useAppStore((state) => state.setDashboardLayouts);
  const setDashboardWidgets = useAppStore((state) => state.setDashboardWidgets);
  const resetDashboardLayout = useAppStore((state) => state.resetDashboardLayout);

  // Additional localStorage backup (for redundancy)
  const [, setLocalStorageLayout] = useLocalStorage(STORAGE_KEY, DEFAULT_DASHBOARD_LAYOUT);

  // Debounce timer ref
  const debounceTimerRef = useRef<NodeJS.Timeout | null>(null);
  const isSavingRef = useRef(false);

  // Initialize layout if not set
  useEffect(() => {
    if (!dashboardLayout) {
      // Try to load from localStorage first
      try {
        const stored = localStorage.getItem(STORAGE_KEY);
        if (stored) {
          const parsed = JSON.parse(stored);
          setDashboardLayouts(parsed.layouts);
          setDashboardWidgets(parsed.widgets);
        } else {
          // Use default layout
          resetDashboardLayout();
        }
      } catch (error) {
        console.warn('Failed to load dashboard layout from localStorage:', error);
        resetDashboardLayout();
      }
    }
  }, [dashboardLayout, setDashboardLayouts, setDashboardWidgets, resetDashboardLayout]);

  // Update layouts with debouncing
  const updateLayouts = useCallback(
    (newLayouts: DashboardLayouts) => {
      // Clear existing timer
      if (debounceTimerRef.current) {
        clearTimeout(debounceTimerRef.current);
      }

      // Set saving state
      isSavingRef.current = true;

      // Debounce the update
      debounceTimerRef.current = setTimeout(() => {
        // Update Zustand store (which will persist to localStorage)
        setDashboardLayouts(newLayouts);

        // Also save to additional localStorage backup
        const currentWidgets = dashboardLayout?.widgets || DEFAULT_DASHBOARD_LAYOUT.widgets;
        setLocalStorageLayout({ layouts: newLayouts, widgets: currentWidgets });

        // TODO: When backend API is ready, uncomment this:
        // syncToBackend({ layouts: newLayouts, widgets: currentWidgets });

        isSavingRef.current = false;
      }, DEBOUNCE_DELAY);
    },
    [dashboardLayout, setDashboardLayouts, setLocalStorageLayout]
  );

  // Update widgets configuration
  const updateWidgets = useCallback(
    (newWidgets: WidgetConfig[]) => {
      setDashboardWidgets(newWidgets);

      // Also save to localStorage backup
      const currentLayouts = dashboardLayout?.layouts || DEFAULT_DASHBOARD_LAYOUT.layouts;
      setLocalStorageLayout({ layouts: currentLayouts, widgets: newWidgets });

      // TODO: When backend API is ready, uncomment this:
      // syncToBackend({ layouts: currentLayouts, widgets: newWidgets });
    },
    [dashboardLayout, setDashboardWidgets, setLocalStorageLayout]
  );

  // Reset to default layout
  const resetLayout = useCallback(() => {
    // Clear debounce timer
    if (debounceTimerRef.current) {
      clearTimeout(debounceTimerRef.current);
    }

    // Reset Zustand store
    resetDashboardLayout();

    // Reset localStorage backup
    setLocalStorageLayout(DEFAULT_DASHBOARD_LAYOUT);

    // TODO: When backend API is ready, uncomment this:
    // syncToBackend(DEFAULT_DASHBOARD_LAYOUT);
  }, [resetDashboardLayout, setLocalStorageLayout]);

  // Cleanup on unmount
  useEffect(() => {
    return () => {
      if (debounceTimerRef.current) {
        clearTimeout(debounceTimerRef.current);
      }
    };
  }, []);

  return {
    layouts: dashboardLayout?.layouts || DEFAULT_DASHBOARD_LAYOUT.layouts,
    widgets: dashboardLayout?.widgets || DEFAULT_DASHBOARD_LAYOUT.widgets,
    updateLayouts,
    updateWidgets,
    resetLayout,
    isSaving: isSavingRef.current,
  };
}

/**
 * TODO: Implement backend sync when API is ready
 *
 * async function syncToBackend(layout: DashboardLayoutState) {
 *   try {
 *     await fetch('/api/user/dashboard-layout', {
 *       method: 'PUT',
 *       headers: { 'Content-Type': 'application/json' },
 *       body: JSON.stringify(layout),
 *     });
 *   } catch (error) {
 *     console.error('Failed to sync dashboard layout to backend:', error);
 *   }
 * }
 */
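A small consumer sketch for `useUserDashboardLayout`. It exercises only the documented return values; `DashboardGrid` is a hypothetical stand-in for whatever grid component ultimately receives the layouts.

```tsx
// Sketch only: DashboardGrid is a hypothetical grid component declared here for the example.
import { useUserDashboardLayout } from '@/hooks/useUserDashboardLayout';
import type { DashboardLayouts, WidgetConfig } from '@/types/store';

declare function DashboardGrid(props: {
  layouts: DashboardLayouts;
  widgets: WidgetConfig[];
  onLayoutChange: (next: DashboardLayouts) => void;
}): JSX.Element;

export function DashboardContainer() {
  const { layouts, widgets, updateLayouts, resetLayout, isSaving } = useUserDashboardLayout();

  return (
    <div>
      <button onClick={resetLayout}>Reset layout</button>
      {isSaving && <span>Saving…</span>}
      {/* updateLayouts is debounced (1s) before it reaches the store and the localStorage backup. */}
      <DashboardGrid layouts={layouts} widgets={widgets} onLayoutChange={updateLayouts} />
    </div>
  );
}
```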
118
ccw/frontend/src/hooks/useWorkflowStatusCounts.ts
Normal file
@@ -0,0 +1,118 @@
// ========================================
// useWorkflowStatusCounts Hook
// ========================================
// TanStack Query hook for fetching workflow status distribution

import { useQuery, useQueryClient } from '@tanstack/react-query';
import { useWorkflowStore, selectProjectPath } from '@/stores/workflowStore';
import { workspaceQueryKeys } from '@/lib/queryKeys';

/**
 * Workflow status count data structure
 */
export interface WorkflowStatusCount {
  status: 'planning' | 'in_progress' | 'completed' | 'paused' | 'archived';
  count: number;
  percentage?: number;
}

// Query key factory
export const workflowStatusCountKeys = {
  all: ['workflowStatusCounts'] as const,
  detail: (projectPath: string) => [...workflowStatusCountKeys.all, 'detail', projectPath] as const,
};

// Default stale time: 30 seconds
const STALE_TIME = 30 * 1000;

export interface UseWorkflowStatusCountsOptions {
  /** Override default stale time (ms) */
  staleTime?: number;
  /** Enable/disable the query */
  enabled?: boolean;
  /** Refetch interval (ms), 0 to disable */
  refetchInterval?: number;
}

export interface UseWorkflowStatusCountsReturn {
  /** Workflow status count data */
  data: WorkflowStatusCount[] | undefined;
  /** Loading state for initial fetch */
  isLoading: boolean;
  /** Fetching state (initial or refetch) */
  isFetching: boolean;
  /** Error object if query failed */
  error: Error | null;
  /** Whether data is stale */
  isStale: boolean;
  /** Manually refetch data */
  refetch: () => Promise<void>;
}

/**
 * Hook for fetching workflow status distribution
 *
 * @example
 * ```tsx
 * const { data, isLoading, error } = useWorkflowStatusCounts();
 *
 * if (isLoading) return <ChartSkeleton />;
 * if (error) return <ErrorMessage error={error} />;
 *
 * return <WorkflowStatusPieChart data={data} />;
 * ```
 */
export function useWorkflowStatusCounts(
  options: UseWorkflowStatusCountsOptions = {}
): UseWorkflowStatusCountsReturn {
  const { staleTime = STALE_TIME, enabled = true, refetchInterval = 0 } = options;
  const projectPath = useWorkflowStore(selectProjectPath);

  // Only enable query when projectPath is available
  const queryEnabled = enabled && !!projectPath;

  const query = useQuery({
    queryKey: workflowStatusCountKeys.detail(projectPath || ''),
    queryFn: async () => {
      if (!projectPath) throw new Error('Project path is required');

      // TODO: Replace with actual API endpoint once backend is ready
      // For now, return mock data matching expected format
      const response = await fetch(`/api/workflow-status-counts?projectPath=${encodeURIComponent(projectPath)}`);
      if (!response.ok) throw new Error('Failed to fetch workflow status counts');
      return response.json() as Promise<WorkflowStatusCount[]>;
    },
    staleTime,
    enabled: queryEnabled,
    refetchInterval: refetchInterval > 0 ? refetchInterval : false,
    retry: 2,
    retryDelay: (attemptIndex) => Math.min(1000 * 2 ** attemptIndex, 10000),
  });

  const refetch = async () => {
    await query.refetch();
  };

  return {
    data: query.data,
    isLoading: query.isLoading,
    isFetching: query.isFetching,
    error: query.error,
    isStale: query.isStale,
    refetch,
  };
}

/**
 * Mock data generator for development/testing
 */
export function generateMockWorkflowStatusCounts(): WorkflowStatusCount[] {
  const statuses: WorkflowStatusCount[] = [
    { status: 'completed', count: 45, percentage: 45 },
    { status: 'in_progress', count: 28, percentage: 28 },
    { status: 'planning', count: 15, percentage: 15 },
    { status: 'paused', count: 8, percentage: 8 },
    { status: 'archived', count: 4, percentage: 4 },
  ];
  return statuses;
}
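And a matching consumer sketch for `useWorkflowStatusCounts`, again using the mock generator as a development fallback until `/api/workflow-status-counts` exists; the pie chart named in the doc comment is replaced here by a plain summary list.

```tsx
// Sketch only: falls back to mock data while the status-counts endpoint is still a TODO.
import {
  useWorkflowStatusCounts,
  generateMockWorkflowStatusCounts,
} from '@/hooks/useWorkflowStatusCounts';

export function WorkflowStatusSummary() {
  const { data, isLoading, error, refetch } = useWorkflowStatusCounts({ staleTime: 60_000 });

  if (isLoading) return <p>Loading workflow status…</p>;
  if (error) return <p>Failed to load status counts: {error.message}</p>;

  const counts = data ?? generateMockWorkflowStatusCounts();
  const total = counts.reduce((sum, item) => sum + item.count, 0);

  return (
    <div>
      <p>{total} workflows tracked</p>
      <ul>
        {counts.map((item) => (
          <li key={item.status}>
            {item.status}: {item.count}
          </li>
        ))}
      </ul>
      <button onClick={() => refetch()}>Refresh</button>
    </div>
  );
}
```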