Mirror of https://github.com/catlog22/Claude-Code-Workflow.git (synced 2026-02-11 02:33:51 +08:00)

feat: Implement Cross-CLI Sync Panel for MCP servers

- Added CrossCliSyncPanel component for synchronizing MCP servers between Claude and Codex.
- Implemented server selection, copy operations, and result handling.
- Added tests for path mapping on Windows drives.
- Created E2E tests for the ask_question Answer Broker functionality.
- Introduced an MCP Tools test script for validating the modified read_file and edit_file tools.
- Updated path_mapper to ensure correct drive formatting on Windows.
- Added a .gitignore entry for the ace-tool directory.
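The path_mapper change mentioned in the commit message is not part of the hunks below. As a rough, hypothetical illustration of the drive-formatting behavior the new Windows tests presumably cover (the function name and signature are assumptions, not the repository's actual path_mapper API):

```ts
// Hypothetical sketch only: normalize a Windows path so the drive letter
// keeps a consistent "X:" form and separators are uniform.
function normalizeWindowsDrivePath(input: string): string {
  // Convert forward slashes to backslashes for Windows-style output
  const backslashed = input.replace(/\//g, '\\');
  // Map "c:\foo" and "C:\foo" to the same canonical drive prefix
  return backslashed.replace(/^([a-z]):/, (_, drive: string) => `${drive.toUpperCase()}:`);
}

// normalizeWindowsDrivePath('c:/Users/dev/project') === 'C:\\Users\\dev\\project'
```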
@@ -1,4 +1,6 @@
 <div align="center">
 
+
+
 <!-- Animated Header -->
 <img src="https://capsule-render.vercel.app/api?type=waving&color=gradient&customColorList=6,11,20&height=180&section=header&text=Claude%20Code%20Workflow&fontSize=42&fontColor=fff&animation=twinkling&fontAlignY=32&desc=Multi-Agent%20AI%20Development%20Framework&descAlignY=52&descSize=18"/>
@@ -3,7 +3,7 @@
 // ========================================
 // Table component displaying all recent projects with MCP server statistics
 
-import { useState } from 'react';
+import { useState, useEffect } from 'react';
 import { useIntl } from 'react-intl';
 import { Folder, Clock, Database, ExternalLink } from 'lucide-react';
 import { Card } from '@/components/ui/Card';
@@ -11,6 +11,7 @@ import { Badge } from '@/components/ui/Badge';
 import { useProjectOperations } from '@/hooks';
 import { cn } from '@/lib/utils';
 import { formatDistanceToNow } from 'date-fns';
+import { fetchOtherProjectsServers } from '@/lib/api';
 
 // ========== Types ==========
 
@@ -32,6 +33,8 @@ export interface AllProjectsTableProps {
   className?: string;
   /** Maximum number of projects to display */
   maxProjects?: number;
+  /** Project paths to display (if not provided, fetches from useProjectOperations) */
+  projectPaths?: string[];
 }
 
 // ========== Component ==========
@@ -41,29 +44,65 @@ export function AllProjectsTable({
   onOpenNewWindow,
   className,
   maxProjects,
+  projectPaths: propProjectPaths,
 }: AllProjectsTableProps) {
   const { formatMessage } = useIntl();
   const [sortField, setSortField] = useState<'name' | 'serverCount' | 'lastModified'>('lastModified');
   const [sortDirection, setSortDirection] = useState<'asc' | 'desc'>('desc');
+  const [projectStats, setProjectStats] = useState<ProjectServerStats[]>([]);
+  const [isStatsLoading, setIsStatsLoading] = useState(false);
 
   const { projects, currentProject, isLoading } = useProjectOperations();
 
-  // Mock server counts since backend doesn't provide per-project stats
-  // In production, this would come from a dedicated API endpoint
-  const projectStats: ProjectServerStats[] = projects.slice(0, maxProjects).map((path) => {
-    const isCurrent = path === currentProject;
-    // Extract name from path (last segment)
-    const name = path.split(/[/\\]/).filter(Boolean).pop() || path;
-
-    return {
-      name,
-      path,
-      serverCount: Math.floor(Math.random() * 10), // Mock data
-      enabledCount: Math.floor(Math.random() * 8), // Mock data
-      lastModified: new Date(Date.now() - Math.random() * 30 * 24 * 60 * 60 * 1000).toISOString(),
-      isCurrent,
-    };
-  });
+  // Use provided project paths or default to all projects
+  const targetProjectPaths = propProjectPaths ?? projects;
+  const displayProjects = maxProjects ? targetProjectPaths.slice(0, maxProjects) : targetProjectPaths;
+
+  // Fetch real project server stats on mount
+  useEffect(() => {
+    const fetchStats = async () => {
+      if (displayProjects.length === 0) {
+        setProjectStats([]);
+        return;
+      }
+
+      setIsStatsLoading(true);
+      try {
+        const response = await fetchOtherProjectsServers(displayProjects);
+        const stats: ProjectServerStats[] = displayProjects.map((path) => {
+          const isCurrent = path === currentProject;
+          const name = path.split(/[/\\]/).filter(Boolean).pop() || path;
+          const servers = response.servers[path] ?? [];
+
+          return {
+            name,
+            path,
+            serverCount: servers.length,
+            enabledCount: servers.filter((s) => s.enabled).length,
+            lastModified: undefined, // Backend doesn't provide this yet
+            isCurrent,
+          };
+        });
+        setProjectStats(stats);
+      } catch (error) {
+        console.error('Failed to fetch project server stats:', error);
+        // Fallback to empty stats on error
+        setProjectStats(
+          displayProjects.map((path) => ({
+            name: path.split(/[/\\]/).filter(Boolean).pop() || path,
+            path,
+            serverCount: 0,
+            enabledCount: 0,
+            isCurrent: path === currentProject,
+          }))
+        );
+      } finally {
+        setIsStatsLoading(false);
+      }
+    };
+
+    void fetchStats();
+  }, [displayProjects, currentProject]);
 
   // Sort projects
   const sortedProjects = [...projectStats].sort((a, b) => {
@@ -107,7 +146,7 @@ export function AllProjectsTable({
     onOpenNewWindow?.(projectPath);
   };
 
-  if (isLoading) {
+  if (isLoading || isStatsLoading) {
     return (
       <Card className={cn('p-8', className)}>
         <div className="flex items-center justify-center">
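A minimal call-site sketch for the new `projectPaths` prop (the example paths and surrounding markup are assumptions, not taken from this commit): when the prop is omitted the table still falls back to `useProjectOperations()`, and server counts now come from `fetchOtherProjectsServers` instead of mock values.

```tsx
// Sketch: pass an explicit set of projects instead of relying on the hook's list.
<AllProjectsTable
  projectPaths={['C:\\work\\app-a', '/home/dev/app-b']} // hypothetical example paths
  maxProjects={10}
  onProjectClick={(path) => console.log('open', path)}
/>
```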
@@ -30,6 +30,9 @@ import {
   installCcwMcp,
   uninstallCcwMcp,
   updateCcwConfig,
+  installCcwMcpToCodex,
+  uninstallCcwMcpFromCodex,
+  updateCcwConfigForCodex,
 } from '@/lib/api';
 import { mcpServersKeys } from '@/hooks';
 import { useQueryClient } from '@tanstack/react-query';
@@ -77,6 +80,8 @@ export interface CcwToolsMcpCardProps {
   onUpdateConfig: (config: Partial<CcwConfig>) => void;
   /** Callback when install/uninstall is triggered */
   onInstall: () => void;
+  /** Installation target: Claude or Codex */
+  target?: 'claude' | 'codex';
 }
 
 // ========== Constants ==========
@@ -105,6 +110,7 @@ export function CcwToolsMcpCard({
   onToggleTool,
   onUpdateConfig,
   onInstall,
+  target = 'claude',
 }: CcwToolsMcpCardProps) {
   const { formatMessage } = useIntl();
   const queryClient = useQueryClient();
@@ -117,22 +123,36 @@ export function CcwToolsMcpCard({
   const [isExpanded, setIsExpanded] = useState(false);
   const [installScope, setInstallScope] = useState<'global' | 'project'>('global');
 
+  const isCodex = target === 'codex';
+
   // Mutations for install/uninstall
   const installMutation = useMutation({
-    mutationFn: (params: { scope: 'global' | 'project'; projectPath?: string }) =>
-      installCcwMcp(params.scope, params.projectPath),
+    mutationFn: isCodex
+      ? () => installCcwMcpToCodex()
+      : (params: { scope: 'global' | 'project'; projectPath?: string }) =>
+          installCcwMcp(params.scope, params.projectPath),
     onSuccess: () => {
-      queryClient.invalidateQueries({ queryKey: mcpServersKeys.all });
-      queryClient.invalidateQueries({ queryKey: ['ccwMcpConfig'] });
+      if (isCodex) {
+        queryClient.invalidateQueries({ queryKey: ['codexMcpServers'] });
+        queryClient.invalidateQueries({ queryKey: ['ccwMcpConfigCodex'] });
+      } else {
+        queryClient.invalidateQueries({ queryKey: mcpServersKeys.all });
+        queryClient.invalidateQueries({ queryKey: ['ccwMcpConfig'] });
+      }
       onInstall();
     },
   });
 
   const uninstallMutation = useMutation({
-    mutationFn: uninstallCcwMcp,
+    mutationFn: isCodex ? uninstallCcwMcpFromCodex : uninstallCcwMcp,
     onSuccess: () => {
-      queryClient.invalidateQueries({ queryKey: mcpServersKeys.all });
-      queryClient.invalidateQueries({ queryKey: ['ccwMcpConfig'] });
+      if (isCodex) {
+        queryClient.invalidateQueries({ queryKey: ['codexMcpServers'] });
+        queryClient.invalidateQueries({ queryKey: ['ccwMcpConfigCodex'] });
+      } else {
+        queryClient.invalidateQueries({ queryKey: mcpServersKeys.all });
+        queryClient.invalidateQueries({ queryKey: ['ccwMcpConfig'] });
+      }
       onInstall();
     },
     onError: (error) => {
@@ -141,9 +161,13 @@ export function CcwToolsMcpCard({
   });
 
   const updateConfigMutation = useMutation({
-    mutationFn: updateCcwConfig,
+    mutationFn: isCodex ? updateCcwConfigForCodex : updateCcwConfig,
    onSuccess: () => {
-      queryClient.invalidateQueries({ queryKey: mcpServersKeys.all });
+      if (isCodex) {
+        queryClient.invalidateQueries({ queryKey: ['codexMcpServers'] });
+      } else {
+        queryClient.invalidateQueries({ queryKey: mcpServersKeys.all });
+      }
     },
   });
 
@@ -170,10 +194,14 @@ export function CcwToolsMcpCard({
   };
 
   const handleInstallClick = () => {
-    installMutation.mutate({
-      scope: installScope,
-      projectPath: installScope === 'project' ? currentProjectPath : undefined,
-    });
+    if (isCodex) {
+      (installMutation as any).mutate(undefined);
+    } else {
+      (installMutation as any).mutate({
+        scope: installScope,
+        projectPath: installScope === 'project' ? currentProjectPath : undefined,
+      });
+    }
   };
 
   const handleUninstallClick = () => {
@@ -213,6 +241,11 @@ export function CcwToolsMcpCard({
           <Badge variant={isInstalled ? 'default' : 'secondary'} className="text-xs">
             {isInstalled ? formatMessage({ id: 'mcp.ccw.status.installed' }) : formatMessage({ id: 'mcp.ccw.status.notInstalled' })}
           </Badge>
+          {isCodex && (
+            <Badge variant="outline" className="text-xs text-blue-500">
+              Codex
+            </Badge>
+          )}
           {isInstalled && (
             <Badge variant="outline" className="text-xs text-info">
               {formatMessage({ id: 'mcp.ccw.status.special' })}
@@ -388,8 +421,8 @@ export function CcwToolsMcpCard({
 
         {/* Install/Uninstall Button */}
         <div className="pt-3 border-t border-border space-y-3">
-          {/* Scope Selection */}
-          {!isInstalled && (
+          {/* Scope Selection - Claude only (Codex is always global) */}
+          {!isInstalled && !isCodex && (
             <div className="space-y-2">
               <p className="text-xs font-medium text-muted-foreground uppercase">
                 {formatMessage({ id: 'mcp.scope' })}
@@ -422,6 +455,12 @@ export function CcwToolsMcpCard({
               </div>
             </div>
           )}
+          {/* Codex note */}
+          {isCodex && !isInstalled && (
+            <p className="text-xs text-muted-foreground">
+              {formatMessage({ id: 'mcp.ccw.codexNote' })}
+            </p>
+          )}
           {!isInstalled ? (
             <Button
               onClick={handleInstallClick}
@@ -430,7 +469,7 @@ export function CcwToolsMcpCard({
             >
               {isPending
                 ? formatMessage({ id: 'mcp.ccw.actions.installing' })
-                : formatMessage({ id: 'mcp.ccw.actions.install' })
+                : formatMessage({ id: isCodex ? 'mcp.ccw.actions.installCodex' : 'mcp.ccw.actions.install' })
               }
             </Button>
           ) : (
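For reference, the `target` prop added above defaults to `'claude'`; a Codex-mode call site looks roughly like the one wired up later in this commit in `McpManagerPage` (props abbreviated here, so this is a sketch rather than the exact JSX):

```tsx
// Sketch based on the Codex branch added in this commit.
<CcwToolsMcpCard
  target="codex"                        // switches mutations to the *ForCodex API helpers
  isInstalled={ccwCodexConfig.isInstalled}
  enabledTools={ccwCodexConfig.enabledTools}
  onToggleTool={handleToggleCcwToolCodex}
  onUpdateConfig={handleUpdateCcwConfigCodex}
  onInstall={handleCcwInstallCodex}
/>
```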
ccw/frontend/src/components/mcp/CrossCliSyncPanel.tsx (new file, 510 lines; full contents below)
@@ -0,0 +1,510 @@
// ========================================
// Cross-CLI Sync Panel Component
// ========================================
// Inline panel for synchronizing MCP servers between Claude and Codex

import { useState, useEffect } from 'react';
import { useIntl } from 'react-intl';
import { ArrowRight, ArrowLeft, CheckCircle2, AlertCircle, Loader2 } from 'lucide-react';
import { Checkbox } from '@/components/ui/Checkbox';
import { Badge } from '@/components/ui/Badge';
import { Button } from '@/components/ui/Button';
import { useMcpServers } from '@/hooks';
import { crossCliCopy, fetchCodexMcpServers } from '@/lib/api';
import { cn } from '@/lib/utils';
import { useWorkflowStore, selectProjectPath } from '@/stores/workflowStore';

// ========== Types ==========

export interface CrossCliSyncPanelProps {
  /** Callback when copy is successful */
  onSuccess?: (copiedCount: number, direction: 'to-codex' | 'from-codex') => void;
  /** Additional class name */
  className?: string;
}

interface ServerCheckboxItem {
  name: string;
  command: string;
  enabled: boolean;
  selected: boolean;
}

type CopyDirection = 'to-codex' | 'from-codex';

// ========== Component ==========

export function CrossCliSyncPanel({ onSuccess, className }: CrossCliSyncPanelProps) {
  const { formatMessage } = useIntl();
  const projectPath = useWorkflowStore(selectProjectPath);

  // Claude servers state
  const { servers: claudeServers } = useMcpServers();
  const [selectedClaude, setSelectedClaude] = useState<Set<string>>(new Set());

  // Codex servers state
  const [codexServers, setCodexServers] = useState<ServerCheckboxItem[]>([]);
  const [selectedCodex, setSelectedCodex] = useState<Set<string>>(new Set());
  const [isLoadingCodex, setIsLoadingCodex] = useState(false);
  const [codexError, setCodexError] = useState<string | null>(null);

  // Copy operation state
  const [isCopying, setIsCopying] = useState(false);
  const [copyResult, setCopyResult] = useState<{
    type: 'success' | 'partial' | null;
    copied: number;
    failed: number;
  }>({ type: null, copied: 0, failed: 0 });

  // Load Codex servers on mount
  useEffect(() => {
    const loadCodexServers = async () => {
      setIsLoadingCodex(true);
      setCodexError(null);
      try {
        const codex = await fetchCodexMcpServers();
        setCodexServers(
          (codex.servers ?? []).map((s) => ({
            name: s.name,
            command: s.command,
            enabled: s.enabled,
            selected: false,
          }))
        );
      } catch (error) {
        console.error('Failed to load Codex MCP servers:', error);
        setCodexError(formatMessage({ id: 'mcp.sync.codexLoadError' }));
        setCodexServers([]);
      } finally {
        setIsLoadingCodex(false);
      }
    };

    void loadCodexServers();
  }, [formatMessage]);

  // Claude server handlers
  const toggleClaudeServer = (name: string) => {
    setSelectedClaude((prev) => {
      const next = new Set(prev);
      if (next.has(name)) {
        next.delete(name);
      } else {
        next.add(name);
      }
      return next;
    });
  };

  const selectAllClaude = () => {
    setSelectedClaude(new Set(claudeServers.map((s) => s.name)));
  };

  const clearAllClaude = () => {
    setSelectedClaude(new Set());
  };

  // Codex server handlers
  const toggleCodexServer = (name: string) => {
    setSelectedCodex((prev) => {
      const next = new Set(prev);
      if (next.has(name)) {
        next.delete(name);
      } else {
        next.add(name);
      }
      return next;
    });
    setCodexServers((prev) =>
      prev.map((s) => (s.name === name ? { ...s, selected: !s.selected } : s))
    );
  };

  const selectAllCodex = () => {
    const allNames = codexServers.map((s) => s.name);
    setSelectedCodex(new Set(allNames));
    setCodexServers((prev) => prev.map((s) => ({ ...s, selected: true })));
  };

  const clearAllCodex = () => {
    setSelectedCodex(new Set());
    setCodexServers((prev) => prev.map((s) => ({ ...s, selected: false })));
  };

  // Copy handlers
  const handleCopyToCodex = async () => {
    if (selectedClaude.size === 0) return;

    setIsCopying(true);
    setCopyResult({ type: null, copied: 0, failed: 0 });

    try {
      const result = await crossCliCopy({
        source: 'claude',
        target: 'codex',
        serverNames: Array.from(selectedClaude),
        projectPath: projectPath ?? undefined,
      });

      if (result.success) {
        const failedCount = result.failed.length;
        const copiedCount = result.copied.length;

        setCopyResult({
          type: failedCount > 0 ? 'partial' : 'success',
          copied: copiedCount,
          failed: failedCount,
        });

        onSuccess?.(copiedCount, 'to-codex');

        // Clear selection after successful copy
        setSelectedClaude(new Set());

        // Auto-hide result after 3 seconds
        setTimeout(() => {
          setCopyResult({ type: null, copied: 0, failed: 0 });
        }, 3000);
      }
    } catch (error) {
      console.error('Failed to copy to Codex:', error);
      setCopyResult({ type: 'partial', copied: 0, failed: selectedClaude.size });
    } finally {
      setIsCopying(false);
    }
  };

  const handleCopyFromCodex = async () => {
    if (selectedCodex.size === 0 || !projectPath) {
      return;
    }

    setIsCopying(true);
    setCopyResult({ type: null, copied: 0, failed: 0 });

    try {
      const result = await crossCliCopy({
        source: 'codex',
        target: 'claude',
        serverNames: Array.from(selectedCodex),
        projectPath,
      });

      if (result.success) {
        const failedCount = result.failed.length;
        const copiedCount = result.copied.length;

        setCopyResult({
          type: failedCount > 0 ? 'partial' : 'success',
          copied: copiedCount,
          failed: failedCount,
        });

        onSuccess?.(copiedCount, 'from-codex');

        // Clear selection after successful copy
        setSelectedCodex(new Set());
        setCodexServers((prev) => prev.map((s) => ({ ...s, selected: false })));

        // Auto-hide result after 3 seconds
        setTimeout(() => {
          setCopyResult({ type: null, copied: 0, failed: 0 });
        }, 3000);
      }
    } catch (error) {
      console.error('Failed to copy from Codex:', error);
      setCopyResult({ type: 'partial', copied: 0, failed: selectedCodex.size });
    } finally {
      setIsCopying(false);
    }
  };

  // Computed values
  const claudeTotal = claudeServers.length;
  const claudeSelected = selectedClaude.size;
  const codexTotal = codexServers.length;
  const codexSelected = selectedCodex.size;

  return (
    <div className={cn('space-y-4', className)}>
      {/* Header */}
      <div className="text-center">
        <h3 className="text-base font-semibold text-foreground">
          {formatMessage({ id: 'mcp.sync.title' })}
        </h3>
        <p className="text-sm text-muted-foreground mt-1">
          {formatMessage({ id: 'mcp.sync.description' })}
        </p>
      </div>

      {/* Result Message */}
      {copyResult.type !== null && (
        <div
          className={cn(
            'flex items-center gap-2 p-3 rounded-lg border',
            copyResult.type === 'success'
              ? 'bg-success/10 border-success/30 text-success'
              : 'bg-warning/10 border-warning/30 text-warning'
          )}
        >
          {copyResult.type === 'success' ? (
            <CheckCircle2 className="w-4 h-4 flex-shrink-0" />
          ) : (
            <AlertCircle className="w-4 h-4 flex-shrink-0" />
          )}
          <span className="text-sm">
            {copyResult.type === 'success'
              ? formatMessage(
                  { id: 'mcp.sync.copySuccess' },
                  { count: copyResult.copied }
                )
              : formatMessage(
                  { id: 'mcp.sync.copyPartial' },
                  { copied: copyResult.copied, failed: copyResult.failed }
                )}
          </span>
        </div>
      )}

      {/* Two-column layout */}
      <div className="grid grid-cols-1 md:grid-cols-2 gap-4">
        {/* Claude Column */}
        <div className="border border-border rounded-lg overflow-hidden">
          {/* Column Header */}
          <div className="bg-muted/50 px-4 py-3 border-b border-border">
            <div className="flex items-center justify-between">
              <h4 className="text-sm font-medium text-foreground">
                {formatMessage({ id: 'mcp.sync.claudeColumn' })}
              </h4>
              <Badge variant="outline" className="text-xs">
                {formatMessage(
                  { id: 'mcp.sync.selectedCount' },
                  { count: claudeSelected, total: claudeTotal }
                )}
              </Badge>
            </div>
          </div>

          {/* Claude Server List */}
          <div className="p-2 max-h-64 overflow-y-auto">
            {claudeTotal === 0 ? (
              <div className="py-8 text-center text-sm text-muted-foreground">
                {formatMessage({ id: 'mcp.sync.noServers' })}
              </div>
            ) : (
              <div className="space-y-1">
                {claudeServers.map((server) => (
                  <div
                    key={server.name}
                    className={cn(
                      'flex items-start gap-2 p-2 rounded cursor-pointer transition-colors',
                      selectedClaude.has(server.name)
                        ? 'bg-primary/10'
                        : 'hover:bg-muted/50'
                    )}
                    onClick={() => toggleClaudeServer(server.name)}
                  >
                    <Checkbox
                      id={`claude-${server.name}`}
                      checked={selectedClaude.has(server.name)}
                      onChange={() => toggleClaudeServer(server.name)}
                      className="w-4 h-4 mt-0.5"
                    />
                    <label
                      htmlFor={`claude-${server.name}`}
                      className="flex-1 min-w-0 cursor-pointer"
                    >
                      <div className="flex items-center gap-2 flex-wrap">
                        <span className="text-sm font-medium text-foreground truncate">
                          {server.name}
                        </span>
                        {server.enabled && (
                          <Badge variant="success" className="text-xs">
                            {formatMessage({ id: 'mcp.status.enabled' })}
                          </Badge>
                        )}
                      </div>
                      <p className="text-xs text-muted-foreground font-mono truncate">
                        {server.command}
                      </p>
                    </label>
                  </div>
                ))}
              </div>
            )}
          </div>

          {/* Claude Footer Actions */}
          {claudeTotal > 0 && (
            <div className="px-2 py-2 bg-muted/30 border-t border-border flex gap-1">
              <Button
                variant="ghost"
                size="sm"
                onClick={selectAllClaude}
                className="flex-1 text-xs"
              >
                {formatMessage({ id: 'mcp.sync.selectAll' })}
              </Button>
              <Button
                variant="ghost"
                size="sm"
                onClick={clearAllClaude}
                className="flex-1 text-xs"
                disabled={claudeSelected === 0}
              >
                {formatMessage({ id: 'mcp.sync.clearAll' })}
              </Button>
            </div>
          )}
        </div>

        {/* Codex Column */}
        <div className="border border-border rounded-lg overflow-hidden">
          {/* Column Header */}
          <div className="bg-muted/50 px-4 py-3 border-b border-border">
            <div className="flex items-center justify-between">
              <h4 className="text-sm font-medium text-foreground">
                {formatMessage({ id: 'mcp.sync.codexColumn' })}
              </h4>
              <Badge variant="outline" className="text-xs">
                {formatMessage(
                  { id: 'mcp.sync.selectedCount' },
                  { count: codexSelected, total: codexTotal }
                )}
              </Badge>
            </div>
          </div>

          {/* Codex Server List */}
          <div className="p-2 max-h-64 overflow-y-auto">
            {isLoadingCodex ? (
              <div className="py-8 flex items-center justify-center">
                <Loader2 className="w-5 h-5 animate-spin text-muted-foreground" />
              </div>
            ) : codexError ? (
              <div className="py-8 text-center text-sm text-destructive">
                {codexError}
              </div>
            ) : codexTotal === 0 ? (
              <div className="py-8 text-center text-sm text-muted-foreground">
                {formatMessage({ id: 'mcp.sync.noServers' })}
              </div>
            ) : (
              <div className="space-y-1">
                {codexServers.map((server) => (
                  <div
                    key={server.name}
                    className={cn(
                      'flex items-start gap-2 p-2 rounded cursor-pointer transition-colors',
                      server.selected
                        ? 'bg-primary/10'
                        : 'hover:bg-muted/50'
                    )}
                    onClick={() => toggleCodexServer(server.name)}
                  >
                    <Checkbox
                      id={`codex-${server.name}`}
                      checked={server.selected}
                      onChange={() => toggleCodexServer(server.name)}
                      className="w-4 h-4 mt-0.5"
                    />
                    <label
                      htmlFor={`codex-${server.name}`}
                      className="flex-1 min-w-0 cursor-pointer"
                    >
                      <div className="flex items-center gap-2 flex-wrap">
                        <span className="text-sm font-medium text-foreground truncate">
                          {server.name}
                        </span>
                        {server.enabled && (
                          <Badge variant="success" className="text-xs">
                            {formatMessage({ id: 'mcp.status.enabled' })}
                          </Badge>
                        )}
                      </div>
                      <p className="text-xs text-muted-foreground font-mono truncate">
                        {server.command}
                      </p>
                    </label>
                  </div>
                ))}
              </div>
            )}
          </div>

          {/* Codex Footer Actions */}
          {codexTotal > 0 && (
            <div className="px-2 py-2 bg-muted/30 border-t border-border flex gap-1">
              <Button
                variant="ghost"
                size="sm"
                onClick={selectAllCodex}
                className="flex-1 text-xs"
              >
                {formatMessage({ id: 'mcp.sync.selectAll' })}
              </Button>
              <Button
                variant="ghost"
                size="sm"
                onClick={clearAllCodex}
                className="flex-1 text-xs"
                disabled={codexSelected === 0}
              >
                {formatMessage({ id: 'mcp.sync.clearAll' })}
              </Button>
            </div>
          )}
        </div>
      </div>

      {/* Copy Buttons */}
      <div className="flex items-center justify-center gap-3">
        <Button
          onClick={handleCopyToCodex}
          disabled={claudeSelected === 0 || isCopying}
          variant="default"
          className="min-w-40"
        >
          {isCopying ? (
            <>
              <Loader2 className="w-4 h-4 mr-2 animate-spin" />
              {formatMessage({ id: 'mcp.sync.syncInProgress' })}
            </>
          ) : (
            <>
              <ArrowRight className="w-4 h-4 mr-2" />
              {formatMessage(
                { id: 'mcp.sync.copyToCodex' },
                { count: claudeSelected }
              )}
            </>
          )}
        </Button>

        <Button
          onClick={handleCopyFromCodex}
          disabled={codexSelected === 0 || isCopying || !projectPath}
          variant="default"
          className="min-w-40"
        >
          {isCopying ? (
            <>
              <Loader2 className="w-4 h-4 mr-2 animate-spin" />
              {formatMessage({ id: 'mcp.sync.syncInProgress' })}
            </>
          ) : (
            <>
              <ArrowLeft className="w-4 h-4 mr-2" />
              {formatMessage(
                { id: 'mcp.sync.copyFromCodex' },
                { count: codexSelected }
              )}
            </>
          )}
        </Button>
      </div>
    </div>
  );
}

export default CrossCliSyncPanel;
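A minimal usage sketch for the new panel; only the import path and the `onSuccess` signature come from this commit, the surrounding wrapper component is an assumption:

```tsx
import { CrossCliSyncPanel } from '@/components/mcp/CrossCliSyncPanel';

// Sketch: embed the panel and react to a completed copy.
export function SyncSection() {
  return (
    <CrossCliSyncPanel
      onSuccess={(copiedCount, direction) => {
        // direction is 'to-codex' | 'from-codex'
        console.log(`copied ${copiedCount} server(s) ${direction}`);
      }}
    />
  );
}
```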
@@ -3411,6 +3411,113 @@ export async function uninstallCcwMcp(): Promise<void> {
   }
 }
 
+// ========== CCW Tools MCP - Codex API ==========
+
+/**
+ * Fetch CCW Tools MCP configuration from Codex config.toml
+ */
+export async function fetchCcwMcpConfigForCodex(): Promise<CcwMcpConfig> {
+  try {
+    const { servers } = await fetchCodexMcpServers();
+    const ccwServer = servers.find((s) => s.name === 'ccw-tools');
+
+    if (!ccwServer) {
+      return { isInstalled: false, enabledTools: [] };
+    }
+
+    const env = ccwServer.env || {};
+    const enabledToolsStr = env.CCW_ENABLED_TOOLS || 'all';
+    const enabledTools = enabledToolsStr === 'all'
+      ? ['write_file', 'edit_file', 'read_file', 'core_memory', 'ask_question', 'smart_search']
+      : enabledToolsStr.split(',').map((t: string) => t.trim());
+
+    return {
+      isInstalled: true,
+      enabledTools,
+      projectRoot: env.CCW_PROJECT_ROOT,
+      allowedDirs: env.CCW_ALLOWED_DIRS,
+      disableSandbox: env.CCW_DISABLE_SANDBOX === '1',
+    };
+  } catch {
+    return { isInstalled: false, enabledTools: [] };
+  }
+}
+
+/**
+ * Build CCW MCP server config for Codex (uses global ccw-mcp command)
+ */
+function buildCcwMcpServerConfigForCodex(config: {
+  enabledTools?: string[];
+  projectRoot?: string;
+  allowedDirs?: string;
+  disableSandbox?: boolean;
+}): { command: string; args: string[]; env: Record<string, string> } {
+  const env: Record<string, string> = {};
+
+  if (config.enabledTools && config.enabledTools.length > 0) {
+    env.CCW_ENABLED_TOOLS = config.enabledTools.join(',');
+  } else {
+    env.CCW_ENABLED_TOOLS = 'write_file,edit_file,read_file,core_memory,ask_question,smart_search';
+  }
+
+  if (config.projectRoot) {
+    env.CCW_PROJECT_ROOT = config.projectRoot;
+  }
+  if (config.allowedDirs) {
+    env.CCW_ALLOWED_DIRS = config.allowedDirs;
+  }
+  if (config.disableSandbox) {
+    env.CCW_DISABLE_SANDBOX = '1';
+  }
+
+  return { command: 'ccw-mcp', args: [], env };
+}
+
+/**
+ * Install CCW Tools MCP to Codex config.toml
+ */
+export async function installCcwMcpToCodex(): Promise<CcwMcpConfig> {
+  const serverConfig = buildCcwMcpServerConfigForCodex({
+    enabledTools: ['write_file', 'edit_file', 'read_file', 'core_memory', 'ask_question', 'smart_search'],
+  });
+
+  const result = await addCodexMcpServer('ccw-tools', serverConfig);
+  if (result.error) {
+    throw new Error(result.error || 'Failed to install CCW MCP to Codex');
+  }
+
+  return fetchCcwMcpConfigForCodex();
+}
+
+/**
+ * Uninstall CCW Tools MCP from Codex config.toml
+ */
+export async function uninstallCcwMcpFromCodex(): Promise<void> {
+  const result = await codexRemoveServer('ccw-tools');
+  if (!result.success) {
+    throw new Error(result.error || 'Failed to uninstall CCW MCP from Codex');
+  }
+}
+
+/**
+ * Update CCW Tools MCP configuration in Codex config.toml
+ */
+export async function updateCcwConfigForCodex(config: {
+  enabledTools?: string[];
+  projectRoot?: string;
+  allowedDirs?: string;
+  disableSandbox?: boolean;
+}): Promise<CcwMcpConfig> {
+  const serverConfig = buildCcwMcpServerConfigForCodex(config);
+
+  const result = await addCodexMcpServer('ccw-tools', serverConfig);
+  if (result.error) {
+    throw new Error(result.error || 'Failed to update CCW config in Codex');
+  }
+
+  return fetchCcwMcpConfigForCodex();
+}
+
 // ========== Index Management API ==========
 
 /**
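A hedged sketch of how the new Codex helpers compose (the wrapper function and the chosen tool subset are assumptions; the helper names and the `CcwMcpConfig` shape come from the hunk above):

```ts
// Sketch: install CCW tools into Codex, then narrow the enabled tool set.
async function enableOnlyFileToolsInCodex(): Promise<void> {
  const installed = await installCcwMcpToCodex();            // writes ccw-tools into config.toml
  if (!installed.isInstalled) return;

  await updateCcwConfigForCodex({
    enabledTools: ['read_file', 'edit_file', 'write_file'],  // subset of the defaults above
  });

  const config = await fetchCcwMcpConfigForCodex();
  console.log('Codex CCW tools now enabled:', config.enabledTools);
}
```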
@@ -151,13 +151,15 @@
       "enableAll": "Enable All",
       "disableAll": "Disable All",
       "install": "Install CCW MCP",
+      "installCodex": "Install to Codex",
       "installing": "Installing...",
       "uninstall": "Uninstall",
       "uninstalling": "Uninstalling...",
       "uninstallConfirm": "Are you sure you want to uninstall CCW MCP?",
       "saveConfig": "Save Configuration",
       "saving": "Saving..."
-      }
+      },
+      "codexNote": "Requires: npm install -g claude-code-workflow"
     },
     "recommended": {
       "title": "Recommended Servers",
@@ -266,6 +268,30 @@
       "copying": "Copying...",
       "copyButton": "Copy to {target}"
     },
+    "sync": {
+      "title": "MCP Server Sync",
+      "description": "Synchronize MCP server configurations between Claude and Codex",
+      "claudeColumn": "Claude Servers",
+      "codexColumn": "Codex Servers",
+      "selectedCount": "Selected: {count} / Total: {total}",
+      "selectAll": "Select All",
+      "clearAll": "Clear All",
+      "copyToCodex": "→ Copy to Codex ({count})",
+      "copyFromCodex": "← Copy from Codex ({count})",
+      "noServers": "No servers available",
+      "codexLoadError": "Failed to load Codex servers",
+      "copySuccess": "Successfully copied {count} server(s)",
+      "copyPartial": "{copied} succeeded, {failed} failed",
+      "syncInProgress": "Syncing..."
+    },
+    "projects": {
+      "title": "Projects Overview",
+      "description": "View MCP server configurations for all projects"
+    },
+    "crossProject": {
+      "title": "Cross-Project Import",
+      "description": "Import MCP server configurations from other projects"
+    },
     "allProjects": {
       "title": "All Projects",
       "name": "Project Name",
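The `{count}`, `{copied}`, and `{failed}` placeholders above are filled through react-intl's values argument; a short sketch (assuming `formatMessage` is in scope from `useIntl()`):

```ts
// Values are interpolated into the ICU-style placeholders defined above.
const ok = formatMessage({ id: 'mcp.sync.copySuccess' }, { count: 3 });
// -> "Successfully copied 3 server(s)"
const partial = formatMessage({ id: 'mcp.sync.copyPartial' }, { copied: 2, failed: 1 });
// -> "2 succeeded, 1 failed"
```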
@@ -151,13 +151,15 @@
       "enableAll": "全部启用",
       "disableAll": "全部禁用",
       "install": "安装 CCW MCP",
+      "installCodex": "安装到 Codex",
       "installing": "安装中...",
       "uninstall": "卸载",
       "uninstalling": "卸载中...",
       "uninstallConfirm": "确定要卸载 CCW MCP 吗?",
       "saveConfig": "保存配置",
       "saving": "保存中..."
-      }
+      },
+      "codexNote": "需要全局安装:npm install -g claude-code-workflow"
     },
     "recommended": {
       "title": "推荐服务器",
@@ -266,6 +268,30 @@
       "copying": "复制中...",
       "copyButton": "复制到 {target}"
     },
+    "sync": {
+      "title": "MCP 服务器同步",
+      "description": "在 Claude 和 Codex 之间同步 MCP 服务器配置",
+      "claudeColumn": "Claude 服务器",
+      "codexColumn": "Codex 服务器",
+      "selectedCount": "已选: {count} / 总计: {total}",
+      "selectAll": "全选",
+      "clearAll": "清空",
+      "copyToCodex": "→ 复制到 Codex ({count})",
+      "copyFromCodex": "← 从 Codex 复制 ({count})",
+      "noServers": "没有可用的服务器",
+      "codexLoadError": "加载 Codex 服务器失败",
+      "copySuccess": "成功复制 {count} 个服务器",
+      "copyPartial": "成功 {copied} 个,失败 {failed} 个",
+      "syncInProgress": "同步中..."
+    },
+    "projects": {
+      "title": "项目概览",
+      "description": "查看所有项目的 MCP 服务器配置"
+    },
+    "crossProject": {
+      "title": "跨项目导入",
+      "description": "从其他项目导入 MCP 服务器配置"
+    },
     "allProjects": {
       "title": "所有项目",
       "name": "项目名称",
@@ -33,7 +33,7 @@ import { CcwToolsMcpCard } from '@/components/mcp/CcwToolsMcpCard';
 import { McpTemplatesSection, TemplateSaveDialog } from '@/components/mcp/McpTemplatesSection';
 import { RecommendedMcpSection } from '@/components/mcp/RecommendedMcpSection';
 import { WindowsCompatibilityWarning } from '@/components/mcp/WindowsCompatibilityWarning';
-import { CrossCliCopyButton } from '@/components/mcp/CrossCliCopyButton';
+import { CrossCliSyncPanel } from '@/components/mcp/CrossCliSyncPanel';
 import { AllProjectsTable } from '@/components/mcp/AllProjectsTable';
 import { OtherProjectsSection } from '@/components/mcp/OtherProjectsSection';
 import { TabsNavigation } from '@/components/ui/TabsNavigation';
@@ -41,7 +41,9 @@ import { useMcpServers, useMcpServerMutations, useNotifications } from '@/hooks'
 import {
   fetchCodexMcpServers,
   fetchCcwMcpConfig,
+  fetchCcwMcpConfigForCodex,
   updateCcwConfig,
+  updateCcwConfigForCodex,
   codexRemoveServer,
   codexToggleServer,
   saveMcpTemplate,
@@ -255,6 +257,14 @@ export function McpManagerPage() {
     staleTime: 5 * 60 * 1000, // 5 minutes
   });
 
+  // Fetch CCW Tools MCP configuration (Codex mode only)
+  const ccwMcpCodexQuery = useQuery({
+    queryKey: ['ccwMcpConfigCodex'],
+    queryFn: fetchCcwMcpConfigForCodex,
+    enabled: cliMode === 'codex',
+    staleTime: 5 * 60 * 1000, // 5 minutes
+  });
+
   const {
     toggleServer,
     deleteServer,
@@ -358,6 +368,32 @@ export function McpManagerPage() {
     ccwMcpQuery.refetch();
   };
 
+  // CCW MCP handlers for Codex mode
+  const ccwCodexConfig = ccwMcpCodexQuery.data ?? {
+    isInstalled: false,
+    enabledTools: [],
+    projectRoot: undefined,
+    allowedDirs: undefined,
+    disableSandbox: undefined,
+  };
+
+  const handleToggleCcwToolCodex = async (tool: string, enabled: boolean) => {
+    const updatedTools = enabled
+      ? [...ccwCodexConfig.enabledTools, tool]
+      : ccwCodexConfig.enabledTools.filter((t) => t !== tool);
+    await updateCcwConfigForCodex({ enabledTools: updatedTools });
+    ccwMcpCodexQuery.refetch();
+  };
+
+  const handleUpdateCcwConfigCodex = async (config: Partial<CcwMcpConfig>) => {
+    await updateCcwConfigForCodex(config);
+    ccwMcpCodexQuery.refetch();
+  };
+
+  const handleCcwInstallCodex = () => {
+    ccwMcpCodexQuery.refetch();
+  };
+
   // Template handlers
   const handleInstallTemplate = (template: any) => {
     setEditingServer({
@@ -617,7 +653,7 @@ export function McpManagerPage() {
           </div>
         )}
 
-        {/* CCW Tools MCP Card - Claude mode only */}
+        {/* CCW Tools MCP Card */}
         {cliMode === 'claude' && (
           <CcwToolsMcpCard
             isInstalled={ccwConfig.isInstalled}
@@ -630,6 +666,19 @@ export function McpManagerPage() {
             onInstall={handleCcwInstall}
           />
         )}
+        {cliMode === 'codex' && (
+          <CcwToolsMcpCard
+            target="codex"
+            isInstalled={ccwCodexConfig.isInstalled}
+            enabledTools={ccwCodexConfig.enabledTools}
+            projectRoot={ccwCodexConfig.projectRoot}
+            allowedDirs={ccwCodexConfig.allowedDirs}
+            disableSandbox={ccwCodexConfig.disableSandbox}
+            onToggleTool={handleToggleCcwToolCodex}
+            onUpdateConfig={handleUpdateCcwConfigCodex}
+            onInstall={handleCcwInstallCodex}
+          />
+        )}
 
         {/* Servers List */}
         {currentIsLoading ? (
@@ -680,37 +729,56 @@ export function McpManagerPage() {
 
       {/* Tab Content: Cross-CLI */}
       {activeTab === 'cross-cli' && (
-        <div className="mt-4 space-y-4">
-          {/* Cross-CLI Copy Button */}
-          <div className="flex items-center justify-between p-4 bg-muted rounded-lg">
-            <div className="flex-1">
-              <h3 className="text-sm font-medium text-foreground">
-                {formatMessage({ id: 'mcp.crossCli.title' })}
-              </h3>
-              <p className="text-xs text-muted-foreground mt-1">
-                {formatMessage({ id: 'mcp.crossCli.selectServersHint' })}
-              </p>
-            </div>
-            <CrossCliCopyButton
-              currentMode={cliMode}
-              onSuccess={() => refetch()}
-            />
-          </div>
-
-          {/* All Projects Table */}
-          <AllProjectsTable
-            maxProjects={10}
-            onProjectClick={(path) => console.log('Open project:', path)}
-            onOpenNewWindow={(path) => window.open(`/?project=${encodeURIComponent(path)}`, '_blank')}
-          />
-
-          {/* Other Projects Section */}
-          <OtherProjectsSection
-            onImportSuccess={(serverName, sourceProject) => {
-              console.log('Imported server:', serverName, 'from:', sourceProject);
-              refetch();
-            }}
-          />
+        <div className="mt-4 space-y-6">
+          {/* Section 1: Claude ↔ Codex 同步 */}
+          <section>
+            <div className="flex items-center gap-2 mb-3">
+              <RefreshCw className="w-4 h-4 text-muted-foreground" />
+              <h3 className="text-sm font-medium text-foreground">
+                {formatMessage({ id: 'mcp.sync.title' })}
+              </h3>
+            </div>
+            <Card className="p-4">
+              <CrossCliSyncPanel onSuccess={(count, direction) => refetch()} />
+            </Card>
+          </section>
+
+          {/* Section 2: 项目概览 */}
+          <section>
+            <div className="flex items-center gap-2 mb-3">
+              <Folder className="w-4 h-4 text-muted-foreground" />
+              <h3 className="text-sm font-medium text-foreground">
+                {formatMessage({ id: 'mcp.projects.title' })}
+              </h3>
+            </div>
+            <p className="text-xs text-muted-foreground mb-3">
+              {formatMessage({ id: 'mcp.projects.description' })}
+            </p>
+            <AllProjectsTable
+              maxProjects={10}
+              onProjectClick={(path) => console.log('Open project:', path)}
+              onOpenNewWindow={(path) => window.open(`/?project=${encodeURIComponent(path)}`, '_blank')}
+            />
+          </section>
+
+          {/* Section 3: 跨项目导入 */}
+          <section>
+            <div className="flex items-center gap-2 mb-3">
+              <Globe className="w-4 h-4 text-muted-foreground" />
+              <h3 className="text-sm font-medium text-foreground">
+                {formatMessage({ id: 'mcp.crossProject.title' })}
+              </h3>
+            </div>
+            <p className="text-xs text-muted-foreground mb-3">
+              {formatMessage({ id: 'mcp.crossProject.description' })}
+            </p>
+            <OtherProjectsSection
+              onImportSuccess={(serverName, sourceProject) => {
+                console.log('Imported server:', serverName, 'from:', sourceProject);
+                refetch();
+              }}
+            />
+          </section>
         </div>
       )}
 
ccw/src/.gitignore (new file, vendored)
@@ -0,0 +1 @@
+.ace-tool/
@@ -222,20 +222,30 @@ export class A2UIWebSocketHandler {
     });
 
     const req = http.request({
-      hostname: 'localhost',
+      hostname: '127.0.0.1',
       port: DASHBOARD_PORT,
       path: '/api/hook',
       method: 'POST',
+      timeout: 2000,
       headers: {
         'Content-Type': 'application/json',
         'Content-Length': Buffer.byteLength(body),
       },
     });
 
+    // Fire-and-forget: don't keep the process alive due to an open socket
+    req.on('socket', (socket) => {
+      socket.unref();
+    });
+
     req.on('error', (err) => {
       console.error(`[A2UI] Failed to forward surface ${surfaceUpdate.surfaceId} to Dashboard:`, err.message);
     });
+
+    req.on('timeout', () => {
+      req.destroy(new Error('Request timed out'));
+    });
 
     req.write(body);
     req.end();
 
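A standalone sketch of the fire-and-forget pattern used above (the port and payload are placeholders, not the real DASHBOARD_PORT): `socket.unref()` keeps the open socket from holding the CLI process alive, while the `timeout` option plus `req.destroy()` bounds how long an unresponsive Dashboard can stall the hook.

```ts
import * as http from 'http';

// Sketch: POST JSON without letting the request keep the process alive.
function postFireAndForget(payload: unknown): void {
  const body = JSON.stringify(payload);
  const req = http.request({
    hostname: '127.0.0.1',
    port: 3456,            // placeholder port for illustration
    path: '/api/hook',
    method: 'POST',
    timeout: 2000,
    headers: { 'Content-Type': 'application/json', 'Content-Length': Buffer.byteLength(body) },
  });

  req.on('socket', (socket) => socket.unref());                 // don't block process exit
  req.on('timeout', () => req.destroy(new Error('Request timed out')));
  req.on('error', (err) => console.error('forward failed:', err.message));

  req.write(body);
  req.end();
}
```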
@@ -90,6 +90,7 @@ const LOCALHOST_PUBLIC_PATHS = [
   '/api/litellm-api/providers',
   '/api/litellm-api/endpoints',
   '/api/health',
+  '/api/a2ui/answer',
 ];
 
 /**
@@ -562,11 +562,17 @@ function startAnswerPolling(questionId: string, isComposite: boolean = false): v
       return;
     }
 
-    const req = http.get({ hostname: '127.0.0.1', port: DASHBOARD_PORT, path: pollPath }, (res) => {
+    const req = http.get({ hostname: '127.0.0.1', port: DASHBOARD_PORT, path: pollPath, timeout: 2000 }, (res) => {
       let data = '';
       res.on('data', (chunk: Buffer) => { data += chunk.toString(); });
       res.on('end', () => {
         try {
+          if (res.statusCode && res.statusCode >= 400) {
+            console.error(`[A2UI-Poll] HTTP ${res.statusCode} from Dashboard (first 200 chars):`, data.slice(0, 200));
+            setTimeout(poll, POLL_INTERVAL_MS);
+            return;
+          }
+
           const parsed = JSON.parse(data);
           if (parsed.pending) {
             // No answer yet, schedule next poll
@@ -599,6 +605,10 @@ function startAnswerPolling(questionId: string, isComposite: boolean = false): v
         setTimeout(poll, POLL_INTERVAL_MS);
       }
     });
+
+    req.on('timeout', () => {
+      req.destroy(new Error('Request timed out'));
+    });
   };
 
   // Start first poll after a short delay to give the Dashboard time to receive the surface
@@ -23,25 +23,64 @@ const EditItemSchema = z.object({
   newText: z.string(),
 });

-const ParamsSchema = z.object({
+// Base schema with common parameters
+const BaseParamsSchema = z.object({
   path: z.string().min(1, 'Path is required'),
-  mode: z.enum(['update', 'line']).default('update'),
   dryRun: z.boolean().default(false),
-  // Update mode params
+});

+// Update mode schema
+const UpdateModeSchema = BaseParamsSchema.extend({
+  mode: z.literal('update').default('update'),
   oldText: z.string().optional(),
   newText: z.string().optional(),
   edits: z.array(EditItemSchema).optional(),
-  replaceAll: z.boolean().optional(),
+  replaceAll: z.boolean().default(false),
-  // Line mode params
+}).refine(
-  operation: z.enum(['insert_before', 'insert_after', 'replace', 'delete']).optional(),
+  (data) => {
-  line: z.number().optional(),
+    const hasSingle = data.oldText !== undefined;
-  end_line: z.number().optional(),
+    const hasBatch = data.edits !== undefined;
+    // XOR: Only one of oldText/newText or edits should be provided
+    return hasSingle !== hasBatch || (!hasSingle && !hasBatch);
+  },
+  {
+    message: 'Use either oldText/newText or edits array, not both',
+  }
+);

+// Line mode schema
+const LineModeSchema = BaseParamsSchema.extend({
+  mode: z.literal('line'),
+  operation: z.enum(['insert_before', 'insert_after', 'replace', 'delete']),
+  line: z.number().int().positive('Line must be a positive integer'),
+  end_line: z.number().int().positive().optional(),
   text: z.string().optional(),
-});
+}).refine(
+  (data) => {
+    // text is required for insert_before, insert_after, and replace operations
+    if (['insert_before', 'insert_after', 'replace'].includes(data.operation)) {
+      return data.text !== undefined;
+    }
+    return true;
+  },
+  {
+    message: 'Parameter "text" is required for insert_before, insert_after, and replace operations',
+  }
+);

+// Discriminated union schema
+const ParamsSchema = z.discriminatedUnion('mode', [
+  UpdateModeSchema,
+  LineModeSchema,
+]);

 type Params = z.infer<typeof ParamsSchema>;
 type EditItem = z.infer<typeof EditItemSchema>;

+// Extract specific types for each mode
+type UpdateModeParams = z.infer<typeof UpdateModeSchema>;
+type LineModeParams = z.infer<typeof LineModeSchema>;

 interface UpdateModeResult {
   content: string;
   modified: boolean;
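For reference, a compact sketch of the mode-discriminated validation introduced above (zod). Field names mirror the hunk, but the wiring is simplified: the discriminator has no `.default('update')` here, and the cross-field rules are expressed as one `superRefine` on the union rather than per-schema `.refine()` calls, so treat this as an assumed-equivalent illustration, not the project's exact code.

```ts
import { z } from 'zod';

const Base = z.object({
  path: z.string().min(1, 'Path is required'),
  dryRun: z.boolean().default(false),
});

const UpdateMode = Base.extend({
  mode: z.literal('update'),
  oldText: z.string().optional(),
  newText: z.string().optional(),
  edits: z.array(z.object({ oldText: z.string(), newText: z.string() })).optional(),
  replaceAll: z.boolean().default(false),
});

const LineMode = Base.extend({
  mode: z.literal('line'),
  operation: z.enum(['insert_before', 'insert_after', 'replace', 'delete']),
  line: z.number().int().positive(),
  end_line: z.number().int().positive().optional(),
  text: z.string().optional(),
});

const EditFileParams = z
  .discriminatedUnion('mode', [UpdateMode, LineMode])
  .superRefine((d, ctx) => {
    // Update mode: single oldText/newText and batch edits are mutually exclusive.
    if (d.mode === 'update' && d.oldText !== undefined && d.edits !== undefined) {
      ctx.addIssue({ code: z.ZodIssueCode.custom, message: 'Use either oldText/newText or edits array, not both' });
    }
    // Line mode: text is required for insert/replace operations.
    if (d.mode === 'line' && d.operation !== 'delete' && d.text === undefined) {
      ctx.addIssue({ code: z.ZodIssueCode.custom, message: 'Parameter "text" is required for this operation' });
    }
  });

type EditFileParams = z.infer<typeof EditFileParams>;

// Checking `mode` after parsing narrows the type, so each branch only sees its own fields.
const parsed: EditFileParams = EditFileParams.parse({ path: 'f.js', mode: 'line', operation: 'delete', line: 5 });
if (parsed.mode === 'line') {
  console.log(parsed.operation, parsed.line);
}
```

Parsing through the discriminated union is what lets the handler later dispatch on `mode` with full type narrowing instead of a `switch` over loosely typed params.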
@@ -229,7 +268,7 @@ function createUnifiedDiff(original: string, modified: string, filePath: string)
  * Auto-adapts line endings (CRLF/LF)
  * Supports multiple edits via 'edits' array
  */
-function executeUpdateMode(content: string, params: Params, filePath: string): UpdateModeResult {
+function executeUpdateMode(content: string, params: UpdateModeParams, filePath: string): UpdateModeResult {
   const { oldText, newText, replaceAll, edits, dryRun = false } = params;

   // Detect original line ending
@@ -334,11 +373,10 @@ function executeUpdateMode(content: string, params: Params, filePath: string): U
  * Mode: line - Line-based operations
  * Operations: insert_before, insert_after, replace, delete
  */
-function executeLineMode(content: string, params: Params): LineModeResult {
+function executeLineMode(content: string, params: LineModeParams): LineModeResult {
   const { operation, line, text, end_line } = params;

-  if (!operation) throw new Error('Parameter "operation" is required for line mode');
-  if (line === undefined) throw new Error('Parameter "line" is required for line mode');
+  // No need for additional validation - Zod schema already ensures required fields

   // Detect original line ending and normalize for processing
   const hasCRLF = content.includes('\r\n');
@@ -418,15 +456,30 @@ export const schema: ToolSchema = {
   name: 'edit_file',
   description: `Edit file using two modes: "update" for text replacement (default) and "line" for line-based operations.

-Usage (update mode):
+**Update Mode** (default):
+- Use oldText/newText for single replacement OR edits for multiple replacements
+- Parameters: oldText, newText, replaceAll, dryRun
+- Cannot use line mode parameters (operation, line, end_line, text)
+- Validation: oldText/newText and edits are mutually exclusive
+
+**Line Mode**:
+- Use for precise line-based operations
+- Parameters: operation (insert_before/insert_after/replace/delete), line, end_line, text, dryRun
+- Cannot use update mode parameters (oldText, newText, edits, replaceAll)
+
+Usage (update mode - single replacement):
 edit_file(path="f.js", oldText="old", newText="new")
+
+Usage (update mode - multiple replacements):
 edit_file(path="f.js", edits=[{oldText:"a",newText:"b"},{oldText:"c",newText:"d"}])

 Usage (line mode):
 edit_file(path="f.js", mode="line", operation="insert_after", line=10, text="new line")
 edit_file(path="f.js", mode="line", operation="delete", line=5, end_line=8)

-Options: dryRun=true (preview diff), replaceAll=true (update mode only)`,
+Options: dryRun=true (preview diff), replaceAll=true (update mode only)
+
+**Important**: Each mode only accepts its own parameters. Providing parameters from both modes will cause a validation error.`,
   inputSchema: {
     type: 'object',
     properties: {
@@ -448,7 +501,7 @@ Options: dryRun=true (preview diff), replaceAll=true (update mode only)`,
       // Update mode params
       oldText: {
         type: 'string',
-        description: '[update mode] Text to find and replace (use oldText/newText OR edits array)',
+        description: '[update mode] Text to find and replace. **Mutually exclusive with edits parameter** - use either oldText/newText or edits, not both.',
       },
       newText: {
         type: 'string',
@@ -456,7 +509,7 @@ Options: dryRun=true (preview diff), replaceAll=true (update mode only)`,
       },
       edits: {
         type: 'array',
-        description: '[update mode] Array of {oldText, newText} for multiple replacements',
+        description: '[update mode] Array of {oldText, newText} for multiple replacements. **Mutually exclusive with oldText/newText** - use either oldText/newText or edits, not both.',
         items: {
           type: 'object',
           properties: {
@@ -474,19 +527,19 @@ Options: dryRun=true (preview diff), replaceAll=true (update mode only)`,
       operation: {
         type: 'string',
         enum: ['insert_before', 'insert_after', 'replace', 'delete'],
-        description: '[line mode] Line operation type',
+        description: '[line mode] Line operation type. **Only valid in line mode** - cannot be combined with update mode parameters.',
       },
       line: {
         type: 'number',
-        description: '[line mode] Line number (1-based)',
+        description: '[line mode] Line number (1-based). **Only valid in line mode** - cannot be combined with update mode parameters.',
       },
       end_line: {
         type: 'number',
-        description: '[line mode] End line for range operations',
+        description: '[line mode] End line for range operations. **Only valid in line mode** - cannot be combined with update mode parameters.',
       },
       text: {
         type: 'string',
-        description: '[line mode] Text for insert/replace operations',
+        description: '[line mode] Text for insert/replace operations. **Only valid in line mode** - cannot be combined with update mode parameters.',
       },
     },
     required: ['path'],
@@ -522,21 +575,18 @@ export async function handler(params: Record<string, unknown>): Promise<ToolResu
     return { success: false, error: `Invalid params: ${parsed.error.message}` };
   }

-  const { path: filePath, mode = 'update', dryRun = false } = parsed.data;
+  const { path: filePath, mode, dryRun } = parsed.data;

   try {
     const { resolvedPath, content } = await readFile(filePath);

     let result: UpdateModeResult | LineModeResult;
-    switch (mode) {
-      case 'update':
-        result = executeUpdateMode(content, parsed.data, filePath);
-        break;
-      case 'line':
-        result = executeLineMode(content, parsed.data);
-        break;
-      default:
-        throw new Error(`Unknown mode: ${mode}. Valid modes: update, line`);
+    // Use discriminated union for type narrowing
+    if (mode === 'line') {
+      result = executeLineMode(content, parsed.data as LineModeParams);
+    } else {
+      // mode is 'update' (default)
+      result = executeUpdateMode(content, parsed.data as UpdateModeParams, filePath);
     }

     // Write if modified and not dry run
@@ -32,6 +32,14 @@ const ParamsSchema = z.object({
   maxFiles: z.number().default(MAX_FILES).describe('Max number of files to return'),
   offset: z.number().min(0).optional().describe('Line offset to start reading from (0-based, for single file only)'),
   limit: z.number().min(1).optional().describe('Number of lines to read (for single file only)'),
+}).refine((data) => {
+  // Validate: offset/limit only allowed for single file mode
+  const hasPagination = data.offset !== undefined || data.limit !== undefined;
+  const isMultiple = Array.isArray(data.paths) && data.paths.length > 1;
+  return !(hasPagination && isMultiple);
+}, {
+  message: 'offset/limit parameters are only supported for single file mode. Cannot use with multiple paths.',
+  path: ['offset', 'limit', 'paths'],
 });

 type Params = z.infer<typeof ParamsSchema>;
@@ -267,12 +275,12 @@ Returns compact file list with optional content. Use offset/limit for large file
       },
       offset: {
         type: 'number',
-        description: 'Line offset to start reading from (0-based, for single file only)',
+        description: 'Line offset to start reading from (0-based). **Only for single file mode** - validation error if used with multiple paths.',
         minimum: 0,
       },
       limit: {
         type: 'number',
-        description: 'Number of lines to read (for single file only)',
+        description: 'Number of lines to read. **Only for single file mode** - validation error if used with multiple paths.',
         minimum: 1,
       },
     },
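The read_file change above is the same cross-field idea applied to pagination: `offset`/`limit` only make sense for a single file. A small sketch of the rule in isolation follows; the `paths` union and the rest of the schema are assumptions, only the refinement mirrors the diff.

```ts
import { z } from 'zod';

const ReadFileParams = z
  .object({
    // Assumed shape: a single path or a list of paths.
    paths: z.union([z.string(), z.array(z.string())]),
    offset: z.number().min(0).optional(),
    limit: z.number().min(1).optional(),
  })
  .refine(
    (data) => {
      // Pagination is only allowed in single-file mode.
      const hasPagination = data.offset !== undefined || data.limit !== undefined;
      const isMultiple = Array.isArray(data.paths) && data.paths.length > 1;
      return !(hasPagination && isMultiple);
    },
    {
      message: 'offset/limit parameters are only supported for single file mode. Cannot use with multiple paths.',
      path: ['offset', 'limit', 'paths'],
    },
  );

// Accepted: a window of lines from one file.
ReadFileParams.parse({ paths: 'src/index.ts', offset: 100, limit: 50 });

// Rejected: pagination combined with multiple paths.
const bad = ReadFileParams.safeParse({ paths: ['a.ts', 'b.ts'], offset: 0, limit: 10 });
console.log(bad.success); // false
```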
ccw/tests/e2e/ask-question-answer-broker.e2e.test.ts (new file, 271 lines)
@@ -0,0 +1,271 @@
|
/**
|
||||||
|
* E2E: ask_question Answer Broker
|
||||||
|
*
|
||||||
|
* Verifies that when the MCP server runs as a separate stdio process (no local WS clients),
|
||||||
|
* `ask_question` forwards the surface to the Dashboard via /api/hook and later retrieves
|
||||||
|
* the user's answer via /api/a2ui/answer polling.
|
||||||
|
*/
|
||||||
|
import { after, before, describe, it, mock } from 'node:test';
|
||||||
|
import assert from 'node:assert/strict';
|
||||||
|
import http from 'node:http';
|
||||||
|
import { spawn, type ChildProcess } from 'node:child_process';
|
||||||
|
import { mkdtempSync, rmSync } from 'node:fs';
|
||||||
|
import { tmpdir } from 'node:os';
|
||||||
|
import { dirname, join } from 'node:path';
|
||||||
|
import { fileURLToPath } from 'node:url';
|
||||||
|
|
||||||
|
const __filename = fileURLToPath(import.meta.url);
|
||||||
|
const __dirname = dirname(__filename);
|
||||||
|
|
||||||
|
const serverUrl = new URL('../../dist/core/server.js', import.meta.url);
|
||||||
|
serverUrl.searchParams.set('t', String(Date.now()));
|
||||||
|
|
||||||
|
interface JsonRpcRequest {
|
||||||
|
jsonrpc: string;
|
||||||
|
id: number;
|
||||||
|
method: string;
|
||||||
|
params: any;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface JsonRpcResponse {
|
||||||
|
jsonrpc: string;
|
||||||
|
id: number;
|
||||||
|
result?: any;
|
||||||
|
error?: { code: number; message: string; data?: any };
|
||||||
|
}
|
||||||
|
|
||||||
|
class McpClient {
|
||||||
|
private serverProcess!: ChildProcess;
|
||||||
|
private requestId = 0;
|
||||||
|
private pendingRequests = new Map<number, { resolve: (r: JsonRpcResponse) => void; reject: (e: Error) => void }>();
|
||||||
|
|
||||||
|
private env: Record<string, string | undefined>;
|
||||||
|
|
||||||
|
constructor(env: Record<string, string | undefined>) {
|
||||||
|
this.env = env;
|
||||||
|
}
|
||||||
|
|
||||||
|
async start(): Promise<void> {
|
||||||
|
const serverPath = join(__dirname, '../../bin/ccw-mcp.js');
|
||||||
|
this.serverProcess = spawn('node', [serverPath], {
|
||||||
|
stdio: ['pipe', 'pipe', 'pipe'],
|
||||||
|
env: { ...process.env, ...this.env },
|
||||||
|
});
|
||||||
|
|
||||||
|
await new Promise<void>((resolve, reject) => {
|
||||||
|
const timeout = setTimeout(() => reject(new Error('MCP server start timeout')), 15000);
|
||||||
|
this.serverProcess.stderr!.on('data', (data) => {
|
||||||
|
const message = data.toString();
|
||||||
|
if (message.includes('started') || message.includes('ccw-tools')) {
|
||||||
|
clearTimeout(timeout);
|
||||||
|
resolve();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
this.serverProcess.on('error', (err) => {
|
||||||
|
clearTimeout(timeout);
|
||||||
|
reject(err);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
this.serverProcess.stdout!.on('data', (data) => {
|
||||||
|
try {
|
||||||
|
const lines = data.toString().split('\n').filter((l: string) => l.trim());
|
||||||
|
for (const line of lines) {
|
||||||
|
const response: JsonRpcResponse = JSON.parse(line);
|
||||||
|
const pending = this.pendingRequests.get(response.id);
|
||||||
|
if (pending) {
|
||||||
|
this.pendingRequests.delete(response.id);
|
||||||
|
pending.resolve(response);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// ignore parse errors
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async call(method: string, params: any = {}, timeoutMs = 10000): Promise<JsonRpcResponse> {
|
||||||
|
const id = ++this.requestId;
|
||||||
|
const request: JsonRpcRequest = { jsonrpc: '2.0', id, method, params };
|
||||||
|
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const timeout = setTimeout(() => {
|
||||||
|
this.pendingRequests.delete(id);
|
||||||
|
reject(new Error(`Request timeout for ${method}`));
|
||||||
|
}, timeoutMs);
|
||||||
|
|
||||||
|
this.pendingRequests.set(id, {
|
||||||
|
resolve: (response) => {
|
||||||
|
clearTimeout(timeout);
|
||||||
|
resolve(response);
|
||||||
|
},
|
||||||
|
reject: (error) => {
|
||||||
|
clearTimeout(timeout);
|
||||||
|
reject(error);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
this.serverProcess.stdin!.write(JSON.stringify(request) + '\n');
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
stop(): void {
|
||||||
|
this.serverProcess?.kill();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function waitForWebSocketOpen(ws: WebSocket, timeoutMs = 10000): Promise<void> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const t = setTimeout(() => reject(new Error('WebSocket open timeout')), timeoutMs);
|
||||||
|
ws.addEventListener('open', () => {
|
||||||
|
clearTimeout(t);
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
ws.addEventListener('error', () => {
|
||||||
|
clearTimeout(t);
|
||||||
|
reject(new Error('WebSocket error'));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function waitForA2UISurface(ws: WebSocket, timeoutMs = 15000): Promise<any> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const t = setTimeout(() => reject(new Error('Timed out waiting for a2ui-surface')), timeoutMs);
|
||||||
|
const handler = (event: MessageEvent) => {
|
||||||
|
try {
|
||||||
|
const data = JSON.parse(String(event.data));
|
||||||
|
if (data?.type === 'a2ui-surface' && data?.payload?.initialState?.questionId) {
|
||||||
|
clearTimeout(t);
|
||||||
|
ws.removeEventListener('message', handler);
|
||||||
|
resolve(data);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// ignore
|
||||||
|
}
|
||||||
|
};
|
||||||
|
ws.addEventListener('message', handler);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function httpRequest(options: http.RequestOptions, body?: string, timeout = 10000): Promise<{ status: number; body: string }> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const req = http.request(options, (res) => {
|
||||||
|
let data = '';
|
||||||
|
res.on('data', (chunk) => (data += chunk));
|
||||||
|
res.on('end', () => resolve({ status: res.statusCode || 0, body: data }));
|
||||||
|
});
|
||||||
|
req.on('error', reject);
|
||||||
|
req.setTimeout(timeout, () => {
|
||||||
|
req.destroy();
|
||||||
|
reject(new Error('Request timeout'));
|
||||||
|
});
|
||||||
|
if (body) req.write(body);
|
||||||
|
req.end();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
describe('E2E: ask_question Answer Broker', async () => {
|
||||||
|
let server: http.Server;
|
||||||
|
let port: number;
|
||||||
|
let projectRoot: string;
|
||||||
|
const originalCwd = process.cwd();
|
||||||
|
let mcp: McpClient;
|
||||||
|
let ws: WebSocket;
|
||||||
|
|
||||||
|
before(async () => {
|
||||||
|
process.env.CCW_DISABLE_WARMUP = '1';
|
||||||
|
|
||||||
|
projectRoot = mkdtempSync(join(tmpdir(), 'ccw-e2e-askq-'));
|
||||||
|
process.chdir(projectRoot);
|
||||||
|
|
||||||
|
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||||
|
const serverMod: any = await import(serverUrl.href);
|
||||||
|
mock.method(console, 'log', () => {});
|
||||||
|
mock.method(console, 'error', () => {});
|
||||||
|
|
||||||
|
server = await serverMod.startServer({ initialPath: projectRoot, port: 0 });
|
||||||
|
const addr = server.address();
|
||||||
|
port = typeof addr === 'object' && addr ? addr.port : 0;
|
||||||
|
assert.ok(port > 0, 'Server should start on a valid port');
|
||||||
|
|
||||||
|
ws = new WebSocket(`ws://127.0.0.1:${port}/ws`);
|
||||||
|
await waitForWebSocketOpen(ws);
|
||||||
|
|
||||||
|
mcp = new McpClient({
|
||||||
|
CCW_PROJECT_ROOT: projectRoot,
|
||||||
|
CCW_ENABLED_TOOLS: 'all',
|
||||||
|
CCW_PORT: String(port),
|
||||||
|
CCW_DISABLE_WARMUP: '1',
|
||||||
|
});
|
||||||
|
await mcp.start();
|
||||||
|
|
||||||
|
// Sanity: broker endpoint should be reachable without auth from localhost
|
||||||
|
const broker = await httpRequest({ hostname: '127.0.0.1', port, path: '/api/a2ui/answer?questionId=nonexistent', method: 'GET' });
|
||||||
|
assert.equal(broker.status, 200);
|
||||||
|
});
|
||||||
|
|
||||||
|
after(async () => {
|
||||||
|
try {
|
||||||
|
ws?.close();
|
||||||
|
} catch {}
|
||||||
|
mcp?.stop();
|
||||||
|
|
||||||
|
await new Promise<void>((resolve) => {
|
||||||
|
server.close(() => resolve());
|
||||||
|
});
|
||||||
|
process.chdir(originalCwd);
|
||||||
|
rmSync(projectRoot, { recursive: true, force: true });
|
||||||
|
mock.restoreAll();
|
||||||
|
});
|
||||||
|
|
||||||
|
it('returns the answered value via MCP tool call', async () => {
|
||||||
|
const questionId = `e2e-q-${Date.now()}`;
|
||||||
|
|
||||||
|
const toolCallPromise = mcp.call(
|
||||||
|
'tools/call',
|
||||||
|
{
|
||||||
|
name: 'ask_question',
|
||||||
|
arguments: {
|
||||||
|
question: {
|
||||||
|
id: questionId,
|
||||||
|
type: 'confirm',
|
||||||
|
title: 'E2E Confirm',
|
||||||
|
message: 'Confirm this in the test harness',
|
||||||
|
},
|
||||||
|
timeout: 15000,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
30000,
|
||||||
|
);
|
||||||
|
|
||||||
|
const surfaceMsg = await waitForA2UISurface(ws, 15000);
|
||||||
|
const surfaceId = surfaceMsg.payload.surfaceId as string;
|
||||||
|
const receivedQuestionId = surfaceMsg.payload.initialState.questionId as string;
|
||||||
|
assert.equal(receivedQuestionId, questionId);
|
||||||
|
|
||||||
|
ws.send(
|
||||||
|
JSON.stringify({
|
||||||
|
type: 'a2ui-action',
|
||||||
|
actionId: 'confirm',
|
||||||
|
surfaceId,
|
||||||
|
parameters: { questionId },
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const response = await toolCallPromise;
|
||||||
|
assert.equal(response.jsonrpc, '2.0');
|
||||||
|
assert.ok(response.result);
|
||||||
|
assert.ok(Array.isArray(response.result.content));
|
||||||
|
|
||||||
|
const text = response.result.content[0]?.text as string;
|
||||||
|
const parsed = JSON.parse(text);
|
||||||
|
const resultObj = parsed.result ?? parsed;
|
||||||
|
|
||||||
|
assert.equal(resultObj.success, true);
|
||||||
|
assert.equal(resultObj.cancelled, false);
|
||||||
|
assert.ok(Array.isArray(resultObj.answers));
|
||||||
|
assert.equal(resultObj.answers[0].questionId, questionId);
|
||||||
|
assert.equal(resultObj.answers[0].value, true);
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -5,7 +5,8 @@
  * Tests that bash -c commands use single quotes to avoid jq escaping issues
  */

-import { describe, it, expect } from 'vitest';
+import { describe, it } from 'node:test';
+import assert from 'node:assert/strict';

 // Import the convertToClaudeCodeFormat function logic
 // Since it's in a browser JS file, we'll recreate it here for testing
@@ -58,9 +59,9 @@ describe('Hook Quoting Fix (Issue #73)', () => {
|
|||||||
|
|
||||||
const result = convertToClaudeCodeFormat(hookData);
|
const result = convertToClaudeCodeFormat(hookData);
|
||||||
|
|
||||||
expect(result.hooks[0].command).toMatch(/^bash -c '/);
|
assert.match(result.hooks[0].command, /^bash -c '/);
|
||||||
expect(result.hooks[0].command).toMatch(/'$/);
|
assert.match(result.hooks[0].command, /'$/);
|
||||||
expect(result.hooks[0].command).not.toMatch(/^bash -c "/);
|
assert.doesNotMatch(result.hooks[0].command, /^bash -c "/);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should preserve jq command double quotes without excessive escaping', () => {
|
it('should preserve jq command double quotes without excessive escaping', () => {
|
||||||
@@ -73,9 +74,9 @@ describe('Hook Quoting Fix (Issue #73)', () => {
|
|||||||
const cmd = result.hooks[0].command;
|
const cmd = result.hooks[0].command;
|
||||||
|
|
||||||
// The jq pattern should remain readable
|
// The jq pattern should remain readable
|
||||||
expect(cmd).toContain('jq -r ".tool_input.command // empty"');
|
assert.ok(cmd.includes('jq -r ".tool_input.command // empty"'));
|
||||||
// Should not have excessive escaping like \\\"
|
// Should not have excessive escaping like \\\"
|
||||||
expect(cmd).not.toContain('\\\\\\"');
|
assert.ok(!cmd.includes('\\\\\\"'));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should correctly escape single quotes in script using \'\\\'\'', () => {
|
it('should correctly escape single quotes in script using \'\\\'\'', () => {
|
||||||
@@ -88,8 +89,8 @@ describe('Hook Quoting Fix (Issue #73)', () => {
|
|||||||
const cmd = result.hooks[0].command;
|
const cmd = result.hooks[0].command;
|
||||||
|
|
||||||
// Single quotes should be escaped as '\''
|
// Single quotes should be escaped as '\''
|
||||||
expect(cmd).toContain("'\\''");
|
assert.ok(cmd.includes("'\\''"));
|
||||||
expect(cmd).toBe("bash -c 'echo '\\''hello world'\\'''");
|
assert.equal(cmd, "bash -c 'echo '\\''hello world'\\'''");
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle danger-bash-confirm hook template correctly', () => {
|
it('should handle danger-bash-confirm hook template correctly', () => {
|
||||||
@@ -102,11 +103,11 @@ describe('Hook Quoting Fix (Issue #73)', () => {
|
|||||||
const cmd = result.hooks[0].command;
|
const cmd = result.hooks[0].command;
|
||||||
|
|
||||||
// Should use single quotes
|
// Should use single quotes
|
||||||
expect(cmd).toMatch(/^bash -c '/);
|
assert.match(cmd, /^bash -c '/);
|
||||||
// jq pattern should be intact
|
// jq pattern should be intact
|
||||||
expect(cmd).toContain('jq -r ".tool_input.command // empty"');
|
assert.ok(cmd.includes('jq -r ".tool_input.command // empty"'));
|
||||||
// JSON output should have escaped double quotes (in shell)
|
// JSON output should have escaped double quotes (in shell)
|
||||||
expect(cmd).toContain('{\\"hookSpecificOutput\\"');
|
assert.ok(cmd.includes('{\\"hookSpecificOutput\\"'));
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle non-bash commands with original logic', () => {
|
it('should handle non-bash commands with original logic', () => {
|
||||||
@@ -117,7 +118,7 @@ describe('Hook Quoting Fix (Issue #73)', () => {
|
|||||||
|
|
||||||
const result = convertToClaudeCodeFormat(hookData);
|
const result = convertToClaudeCodeFormat(hookData);
|
||||||
|
|
||||||
expect(result.hooks[0].command).toBe('ccw memory track --type file --action read');
|
assert.equal(result.hooks[0].command, 'ccw memory track --type file --action read');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle bash commands without -c flag with original logic', () => {
|
it('should handle bash commands without -c flag with original logic', () => {
|
||||||
@@ -128,7 +129,7 @@ describe('Hook Quoting Fix (Issue #73)', () => {
|
|||||||
|
|
||||||
const result = convertToClaudeCodeFormat(hookData);
|
const result = convertToClaudeCodeFormat(hookData);
|
||||||
|
|
||||||
expect(result.hooks[0].command).toBe('bash script.sh --arg value');
|
assert.equal(result.hooks[0].command, 'bash script.sh --arg value');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle args with spaces in non-bash commands', () => {
|
it('should handle args with spaces in non-bash commands', () => {
|
||||||
@@ -139,7 +140,7 @@ describe('Hook Quoting Fix (Issue #73)', () => {
|
|||||||
|
|
||||||
const result = convertToClaudeCodeFormat(hookData);
|
const result = convertToClaudeCodeFormat(hookData);
|
||||||
|
|
||||||
expect(result.hooks[0].command).toBe('echo "hello world" "another arg"');
|
assert.equal(result.hooks[0].command, 'echo "hello world" "another arg"');
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle already formatted hook data', () => {
|
it('should handle already formatted hook data', () => {
|
||||||
@@ -152,7 +153,7 @@ describe('Hook Quoting Fix (Issue #73)', () => {
|
|||||||
|
|
||||||
const result = convertToClaudeCodeFormat(hookData);
|
const result = convertToClaudeCodeFormat(hookData);
|
||||||
|
|
||||||
expect(result).toBe(hookData);
|
assert.equal(result, hookData);
|
||||||
});
|
});
|
||||||
|
|
||||||
it('should handle additional args after bash -c script', () => {
|
it('should handle additional args after bash -c script', () => {
|
||||||
@@ -164,8 +165,8 @@ describe('Hook Quoting Fix (Issue #73)', () => {
|
|||||||
const result = convertToClaudeCodeFormat(hookData);
|
const result = convertToClaudeCodeFormat(hookData);
|
||||||
const cmd = result.hooks[0].command;
|
const cmd = result.hooks[0].command;
|
||||||
|
|
||||||
expect(cmd).toMatch(/^bash -c 'echo \$1'/);
|
assert.match(cmd, /^bash -c 'echo \$1'/);
|
||||||
expect(cmd).toContain('"hello world"');
|
assert.ok(cmd.includes('"hello world"'));
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -195,11 +196,11 @@ describe('Hook Quoting Fix (Issue #73)', () => {
|
|||||||
const cmd = result.hooks[0].command;
|
const cmd = result.hooks[0].command;
|
||||||
|
|
||||||
// All bash -c commands should use single quotes
|
// All bash -c commands should use single quotes
|
||||||
expect(cmd).toMatch(/^bash -c '/);
|
assert.match(cmd, /^bash -c '/);
|
||||||
expect(cmd).toMatch(/'$/);
|
assert.match(cmd, /'$/);
|
||||||
|
|
||||||
// jq patterns should be intact
|
// jq patterns should be intact
|
||||||
expect(cmd).toContain('jq -r ".');
|
assert.ok(cmd.includes('jq -r ".'));
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
@@ -206,9 +206,8 @@ describe('Smart Search Tool Definition', async () => {
     const modeEnum = params.properties.mode?.enum;

     assert.ok(modeEnum, 'Should have mode enum');
-    assert.ok(modeEnum.includes('auto'), 'Should support auto mode');
-    assert.ok(modeEnum.includes('hybrid'), 'Should support hybrid mode');
-    assert.ok(modeEnum.includes('exact'), 'Should support exact mode');
+    assert.ok(modeEnum.includes('fuzzy'), 'Should support fuzzy mode');
+    assert.ok(modeEnum.includes('semantic'), 'Should support semantic mode');
   });
 });

@@ -146,9 +146,12 @@ class Config:
     staged_coarse_k: int = 200  # Number of coarse candidates from Stage 1 binary search
     staged_lsp_depth: int = 2  # LSP relationship expansion depth in Stage 2
     staged_stage2_mode: str = "precomputed"  # "precomputed" (graph_neighbors) | "realtime" (LSP)
-    staged_realtime_lsp_timeout_s: float = 10.0  # Max time budget for realtime LSP expansion
-    staged_realtime_lsp_max_nodes: int = 100  # Node cap for realtime graph expansion
-    staged_realtime_lsp_warmup_s: float = 2.0  # Wait for server analysis after opening seed docs
+    staged_realtime_lsp_timeout_s: float = 30.0  # Max time budget for realtime LSP expansion
+    staged_realtime_lsp_depth: int = 1  # BFS depth for realtime LSP expansion
+    staged_realtime_lsp_max_nodes: int = 50  # Node cap for realtime graph expansion
+    staged_realtime_lsp_max_seeds: int = 1  # Seed cap for realtime graph expansion
+    staged_realtime_lsp_max_concurrent: int = 2  # Max concurrent LSP requests during graph expansion
+    staged_realtime_lsp_warmup_s: float = 3.0  # Wait for server analysis after opening seed docs
     staged_realtime_lsp_resolve_symbols: bool = False  # If True, resolves symbol names via documentSymbol (slower)
     staged_clustering_strategy: str = "auto"  # "auto", "hdbscan", "dbscan", "frequency", "noop"
     staged_clustering_min_size: int = 3  # Minimum cluster size for Stage 3 grouping
@@ -14,6 +14,7 @@ Features:
 from __future__ import annotations

 import asyncio
+import logging
 import os
 import time
 from collections import OrderedDict
@@ -22,6 +23,8 @@ from pathlib import Path
 from typing import Any, Dict, List, Optional, TYPE_CHECKING
 from urllib.parse import unquote

+logger = logging.getLogger(__name__)
+
 if TYPE_CHECKING:
     from codexlens.lsp.standalone_manager import StandaloneLspManager

@@ -362,6 +365,14 @@ class LspBridge:
             except (KeyError, TypeError):
                 continue

+        logger.debug(
+            "LSP references for %s (%s:%s:%s): %d",
+            symbol.id,
+            symbol.file_path,
+            symbol.range.start_line,
+            symbol.range.start_character,
+            len(locations),
+        )
         self._cache(cache_key, symbol.file_path, locations)
         return locations

@@ -542,6 +553,14 @@ class LspBridge:
                 detail="Inferred from reference",
             ))

+        logger.debug(
+            "LSP call hierarchy for %s (%s:%s:%s): %d",
+            symbol.id,
+            symbol.file_path,
+            symbol.range.start_line,
+            symbol.range.start_character,
+            len(items),
+        )
         self._cache(cache_key, symbol.file_path, items)
         return items

@@ -854,7 +854,7 @@ class ChainSearchEngine:

         # ========== Stage 2: LSP Graph Expansion ==========
         stage2_start = time.time()
-        expanded_results = self._stage2_lsp_expand(coarse_results, index_root)
+        expanded_results = self._stage2_lsp_expand(coarse_results, index_root, query=query)
         stage_times["stage2_expand_ms"] = (time.time() - stage2_start) * 1000
         stage_counts["stage2_expanded"] = len(expanded_results)

@@ -969,8 +969,9 @@ class ChainSearchEngine:
|
|||||||
|
|
||||||
# Try centralized BinarySearcher first (preferred for mmap indexes)
|
# Try centralized BinarySearcher first (preferred for mmap indexes)
|
||||||
index_root = index_paths[0].parent if index_paths else None
|
index_root = index_paths[0].parent if index_paths else None
|
||||||
coarse_candidates: List[Tuple[int, int, Path]] = [] # (chunk_id, distance, index_path)
|
coarse_candidates: List[Tuple[int, float, Path]] = [] # (chunk_id, distance, index_path)
|
||||||
used_centralized = False
|
used_centralized = False
|
||||||
|
using_dense_fallback = False
|
||||||
|
|
||||||
if index_root:
|
if index_root:
|
||||||
binary_searcher = self._get_centralized_binary_searcher(index_root)
|
binary_searcher = self._get_centralized_binary_searcher(index_root)
|
||||||
@@ -992,30 +993,78 @@ class ChainSearchEngine:
|
|||||||
self.logger.debug(f"Centralized binary search failed: {exc}")
|
self.logger.debug(f"Centralized binary search failed: {exc}")
|
||||||
|
|
||||||
if not used_centralized:
|
if not used_centralized:
|
||||||
# Fallback to per-directory binary indexes
|
# Fallback to per-directory binary indexes (legacy BinaryANNIndex).
|
||||||
use_gpu = True
|
#
|
||||||
if self._config is not None:
|
# Generating the query binary embedding can be expensive (depending on embedding backend).
|
||||||
use_gpu = getattr(self._config, "embedding_use_gpu", True)
|
# If no legacy binary vector files exist, skip this path and fall back to dense ANN search.
|
||||||
|
has_legacy_binary_vectors = any(
|
||||||
|
(p.parent / f"{p.stem}_binary_vectors.bin").exists() for p in index_paths
|
||||||
|
)
|
||||||
|
if not has_legacy_binary_vectors:
|
||||||
|
self.logger.debug(
|
||||||
|
"No legacy binary vector files found; skipping legacy binary search fallback"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
use_gpu = True
|
||||||
|
if self._config is not None:
|
||||||
|
use_gpu = getattr(self._config, "embedding_use_gpu", True)
|
||||||
|
|
||||||
try:
|
query_binary = None
|
||||||
binary_backend = BinaryEmbeddingBackend(use_gpu=use_gpu)
|
|
||||||
query_binary = binary_backend.embed_packed([query])[0]
|
|
||||||
except Exception as exc:
|
|
||||||
self.logger.warning(f"Failed to generate binary query embedding: {exc}")
|
|
||||||
return [], index_root
|
|
||||||
|
|
||||||
for index_path in index_paths:
|
|
||||||
try:
|
try:
|
||||||
binary_index = self._get_or_create_binary_index(index_path)
|
binary_backend = BinaryEmbeddingBackend(use_gpu=use_gpu)
|
||||||
if binary_index is None or binary_index.count() == 0:
|
query_binary = binary_backend.embed_packed([query])[0]
|
||||||
continue
|
|
||||||
ids, distances = binary_index.search(query_binary, coarse_k)
|
|
||||||
for chunk_id, dist in zip(ids, distances):
|
|
||||||
coarse_candidates.append((chunk_id, dist, index_path))
|
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
self.logger.debug(
|
self.logger.warning(f"Failed to generate binary query embedding: {exc}")
|
||||||
"Binary search failed for %s: %s", index_path, exc
|
query_binary = None
|
||||||
)
|
|
||||||
|
if query_binary is not None:
|
||||||
|
for index_path in index_paths:
|
||||||
|
try:
|
||||||
|
binary_index = self._get_or_create_binary_index(index_path)
|
||||||
|
if binary_index is None or binary_index.count() == 0:
|
||||||
|
continue
|
||||||
|
ids, distances = binary_index.search(query_binary, coarse_k)
|
||||||
|
for chunk_id, dist in zip(ids, distances):
|
||||||
|
coarse_candidates.append((chunk_id, float(dist), index_path))
|
||||||
|
except Exception as exc:
|
||||||
|
self.logger.debug(
|
||||||
|
"Binary search failed for %s: %s", index_path, exc
|
||||||
|
)
|
||||||
|
|
||||||
|
if not coarse_candidates:
|
||||||
|
# Final fallback: dense ANN coarse search (HNSW) over existing dense vector indexes.
|
||||||
|
#
|
||||||
|
# This allows the staged pipeline (LSP expansion + clustering) to run even when
|
||||||
|
# binary vectors are not generated for the current project.
|
||||||
|
dense_candidates: List[Tuple[int, float, Path]] = []
|
||||||
|
try:
|
||||||
|
from codexlens.semantic.ann_index import ANNIndex
|
||||||
|
from codexlens.semantic.embedder import Embedder
|
||||||
|
|
||||||
|
embedder = Embedder()
|
||||||
|
query_dense = embedder.embed_to_numpy([query])[0]
|
||||||
|
dim = int(getattr(query_dense, "shape", (len(query_dense),))[0])
|
||||||
|
|
||||||
|
for index_path in index_paths:
|
||||||
|
try:
|
||||||
|
ann_index = ANNIndex(index_path, dim=dim)
|
||||||
|
if not ann_index.load() or ann_index.count() == 0:
|
||||||
|
continue
|
||||||
|
ids, distances = ann_index.search(query_dense, top_k=coarse_k)
|
||||||
|
for chunk_id, dist in zip(ids, distances):
|
||||||
|
dense_candidates.append((chunk_id, float(dist), index_path))
|
||||||
|
except Exception as exc:
|
||||||
|
self.logger.debug(
|
||||||
|
"Dense coarse search failed for %s: %s", index_path, exc
|
||||||
|
)
|
||||||
|
except Exception as exc:
|
||||||
|
self.logger.debug("Dense coarse search fallback unavailable: %s", exc)
|
||||||
|
dense_candidates = []
|
||||||
|
|
||||||
|
if dense_candidates:
|
||||||
|
dense_candidates.sort(key=lambda x: x[1])
|
||||||
|
coarse_candidates = dense_candidates[:coarse_k]
|
||||||
|
using_dense_fallback = True
|
||||||
|
|
||||||
if not coarse_candidates:
|
if not coarse_candidates:
|
||||||
return [], index_root
|
return [], index_root
|
||||||
@@ -1086,7 +1135,11 @@ class ChainSearchEngine:
|
|||||||
(d for cid, d, _ in coarse_candidates if cid == chunk_id),
|
(d for cid, d, _ in coarse_candidates if cid == chunk_id),
|
||||||
256
|
256
|
||||||
)
|
)
|
||||||
score = 1.0 - (distance / 256.0)
|
if using_dense_fallback:
|
||||||
|
# Cosine distance in [0, 2] -> clamp to [0, 1] score
|
||||||
|
score = max(0.0, 1.0 - float(distance))
|
||||||
|
else:
|
||||||
|
score = 1.0 - (int(distance) / 256.0)
|
||||||
|
|
||||||
content = chunk.get("content", "")
|
content = chunk.get("content", "")
|
||||||
|
|
||||||
@@ -1129,6 +1182,7 @@ class ChainSearchEngine:
|
|||||||
self,
|
self,
|
||||||
coarse_results: List[SearchResult],
|
coarse_results: List[SearchResult],
|
||||||
index_root: Optional[Path],
|
index_root: Optional[Path],
|
||||||
|
query: Optional[str] = None,
|
||||||
) -> List[SearchResult]:
|
) -> List[SearchResult]:
|
||||||
"""Stage 2: LSP/graph expansion for staged cascade.
|
"""Stage 2: LSP/graph expansion for staged cascade.
|
||||||
|
|
||||||
@@ -1152,7 +1206,11 @@ class ChainSearchEngine:
|
|||||||
mode = (getattr(self._config, "staged_stage2_mode", "precomputed") or "precomputed").strip().lower()
|
mode = (getattr(self._config, "staged_stage2_mode", "precomputed") or "precomputed").strip().lower()
|
||||||
|
|
||||||
if mode in {"realtime", "live"}:
|
if mode in {"realtime", "live"}:
|
||||||
return self._stage2_realtime_lsp_expand(coarse_results, index_root=index_root)
|
return self._stage2_realtime_lsp_expand(
|
||||||
|
coarse_results,
|
||||||
|
index_root=index_root,
|
||||||
|
query=query,
|
||||||
|
)
|
||||||
|
|
||||||
return self._stage2_precomputed_graph_expand(coarse_results, index_root=index_root)
|
return self._stage2_precomputed_graph_expand(coarse_results, index_root=index_root)
|
||||||
|
|
||||||
@@ -1209,6 +1267,7 @@ class ChainSearchEngine:
|
|||||||
coarse_results: List[SearchResult],
|
coarse_results: List[SearchResult],
|
||||||
*,
|
*,
|
||||||
index_root: Path,
|
index_root: Path,
|
||||||
|
query: Optional[str] = None,
|
||||||
) -> List[SearchResult]:
|
) -> List[SearchResult]:
|
||||||
"""Stage 2 (realtime): compute expansion graph via live LSP servers."""
|
"""Stage 2 (realtime): compute expansion graph via live LSP servers."""
|
||||||
import asyncio
|
import asyncio
|
||||||
@@ -1217,16 +1276,27 @@ class ChainSearchEngine:
|
|||||||
from codexlens.hybrid_search.data_structures import CodeSymbolNode, Range
|
from codexlens.hybrid_search.data_structures import CodeSymbolNode, Range
|
||||||
from codexlens.lsp import LspBridge, LspGraphBuilder
|
from codexlens.lsp import LspBridge, LspGraphBuilder
|
||||||
|
|
||||||
max_depth = 2
|
max_depth = 1
|
||||||
timeout_s = 10.0
|
timeout_s = 30.0
|
||||||
max_nodes = 100
|
max_nodes = 50
|
||||||
warmup_s = 2.0
|
max_seeds = 1
|
||||||
|
max_concurrent = 2
|
||||||
|
warmup_s = 3.0
|
||||||
resolve_symbols = False
|
resolve_symbols = False
|
||||||
if self._config is not None:
|
if self._config is not None:
|
||||||
max_depth = int(getattr(self._config, "staged_lsp_depth", 2) or 2)
|
max_depth = int(
|
||||||
timeout_s = float(getattr(self._config, "staged_realtime_lsp_timeout_s", 10.0) or 10.0)
|
getattr(
|
||||||
max_nodes = int(getattr(self._config, "staged_realtime_lsp_max_nodes", 100) or 100)
|
self._config,
|
||||||
warmup_s = float(getattr(self._config, "staged_realtime_lsp_warmup_s", 2.0) or 0.0)
|
"staged_realtime_lsp_depth",
|
||||||
|
getattr(self._config, "staged_lsp_depth", 1),
|
||||||
|
)
|
||||||
|
or 1
|
||||||
|
)
|
||||||
|
timeout_s = float(getattr(self._config, "staged_realtime_lsp_timeout_s", 30.0) or 30.0)
|
||||||
|
max_nodes = int(getattr(self._config, "staged_realtime_lsp_max_nodes", 50) or 50)
|
||||||
|
warmup_s = float(getattr(self._config, "staged_realtime_lsp_warmup_s", 3.0) or 0.0)
|
||||||
|
max_seeds = int(getattr(self._config, "staged_realtime_lsp_max_seeds", 1) or 1)
|
||||||
|
max_concurrent = int(getattr(self._config, "staged_realtime_lsp_max_concurrent", 2) or 2)
|
||||||
resolve_symbols = bool(getattr(self._config, "staged_realtime_lsp_resolve_symbols", False))
|
resolve_symbols = bool(getattr(self._config, "staged_realtime_lsp_resolve_symbols", False))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -1234,13 +1304,189 @@ class ChainSearchEngine:
|
|||||||
except Exception:
|
except Exception:
|
||||||
source_root = Path(coarse_results[0].path).resolve().parent
|
source_root = Path(coarse_results[0].path).resolve().parent
|
||||||
|
|
||||||
workspace_root = self._find_lsp_workspace_root(source_root)
|
lsp_config_file = self._find_lsp_config_file(source_root)
|
||||||
|
workspace_root = Path(source_root).resolve()
|
||||||
|
|
||||||
max_expand = min(10, len(coarse_results))
|
max_expand = min(max(1, max_seeds), len(coarse_results))
|
||||||
seed_nodes: List[CodeSymbolNode] = []
|
seed_nodes: List[CodeSymbolNode] = []
|
||||||
seed_ids: set[str] = set()
|
seed_ids: set[str] = set()
|
||||||
|
|
||||||
for seed in list(coarse_results)[:max_expand]:
|
selected_results = list(coarse_results)
|
||||||
|
if query:
|
||||||
|
import re
|
||||||
|
|
||||||
|
terms = {
|
||||||
|
t.lower()
|
||||||
|
for t in re.findall(r"[A-Za-z_][A-Za-z0-9_]*", query)
|
||||||
|
if t
|
||||||
|
}
|
||||||
|
|
||||||
|
def _priority(result: SearchResult) -> float:
|
||||||
|
sym = (result.symbol_name or "").strip().lower()
|
||||||
|
stem = Path(result.path).stem.lower() if result.path else ""
|
||||||
|
score = 0.0
|
||||||
|
if sym and sym in terms:
|
||||||
|
score += 5.0
|
||||||
|
if sym:
|
||||||
|
score += 2.0
|
||||||
|
if stem and stem in terms:
|
||||||
|
score += 1.0
|
||||||
|
if result.symbol_kind:
|
||||||
|
score += 0.5
|
||||||
|
if result.start_line:
|
||||||
|
score += 0.2
|
||||||
|
return score
|
||||||
|
|
||||||
|
indexed = list(enumerate(selected_results))
|
||||||
|
indexed.sort(
|
||||||
|
key=lambda pair: (
|
||||||
|
_priority(pair[1]),
|
||||||
|
float(pair[1].score),
|
||||||
|
-pair[0],
|
||||||
|
),
|
||||||
|
reverse=True,
|
||||||
|
)
|
||||||
|
selected_results = [r for _, r in indexed]
|
||||||
|
else:
|
||||||
|
indexed = list(enumerate(selected_results))
|
||||||
|
indexed.sort(
|
||||||
|
key=lambda pair: (
|
||||||
|
1.0 if pair[1].symbol_name else 0.0,
|
||||||
|
float(pair[1].score),
|
||||||
|
-pair[0],
|
||||||
|
),
|
||||||
|
reverse=True,
|
||||||
|
)
|
||||||
|
selected_results = [r for _, r in indexed]
|
||||||
|
|
||||||
|
# Prefer symbol-definition seeds when possible (improves LSP reference/call-hierarchy results).
|
||||||
|
#
|
||||||
|
# NOTE: We avoid relying purely on the stored symbol index here because its ranges may be
|
||||||
|
# imprecise in some projects. Instead, we attempt a lightweight definition-line detection
|
||||||
|
# for query identifiers within the top coarse candidate files.
|
||||||
|
if query:
|
||||||
|
try:
|
||||||
|
import re
|
||||||
|
|
||||||
|
terms_raw = [
|
||||||
|
t for t in re.findall(r"[A-Za-z_][A-Za-z0-9_]*", query) if t
|
||||||
|
]
|
||||||
|
stopwords = {
|
||||||
|
"class", "def", "function", "method", "import", "from", "return",
|
||||||
|
"async", "await", "public", "private", "protected", "static",
|
||||||
|
"const", "let", "var", "new",
|
||||||
|
}
|
||||||
|
candidate_terms = [
|
||||||
|
t for t in terms_raw
|
||||||
|
if t.lower() not in stopwords and len(t) >= 3
|
||||||
|
]
|
||||||
|
|
||||||
|
candidate_terms.sort(key=len, reverse=True)
|
||||||
|
|
||||||
|
# Candidate files (best-first): de-dupe while preserving ordering.
|
||||||
|
candidate_files: List[str] = []
|
||||||
|
seen_files: set[str] = set()
|
||||||
|
for r in selected_results:
|
||||||
|
if r.path and r.path not in seen_files:
|
||||||
|
seen_files.add(r.path)
|
||||||
|
candidate_files.append(r.path)
|
||||||
|
if len(candidate_files) >= 50:
|
||||||
|
break
|
||||||
|
|
||||||
|
# Also consider files whose *names* match query identifiers (helps when coarse retrieval
|
||||||
|
# misses the defining file for a symbol like `Config`).
|
||||||
|
try:
|
||||||
|
if source_root and candidate_terms:
|
||||||
|
allow_suffix = {".py", ".ts", ".tsx", ".js", ".jsx"}
|
||||||
|
name_terms = [t.lower() for t in candidate_terms[:3]]
|
||||||
|
for dirpath, _, filenames in os.walk(source_root):
|
||||||
|
for filename in filenames:
|
||||||
|
suffix = Path(filename).suffix.lower()
|
||||||
|
if suffix not in allow_suffix:
|
||||||
|
continue
|
||||||
|
lowered = filename.lower()
|
||||||
|
if any(t in lowered for t in name_terms):
|
||||||
|
fp = str(Path(dirpath) / filename)
|
||||||
|
if fp not in seen_files:
|
||||||
|
seen_files.add(fp)
|
||||||
|
candidate_files.append(fp)
|
||||||
|
if len(candidate_files) >= 120:
|
||||||
|
break
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
for term in candidate_terms[:5]:
|
||||||
|
if len(seed_nodes) >= max_expand:
|
||||||
|
break
|
||||||
|
|
||||||
|
escaped = re.escape(term)
|
||||||
|
py_class = re.compile(rf"^\s*class\s+{escaped}\b")
|
||||||
|
py_def = re.compile(rf"^\s*(?:async\s+)?def\s+{escaped}\b")
|
||||||
|
ts_class = re.compile(rf"^\s*(?:export\s+)?class\s+{escaped}\b")
|
||||||
|
ts_func = re.compile(rf"^\s*(?:export\s+)?(?:async\s+)?function\s+{escaped}\b")
|
||||||
|
|
||||||
|
for file_path in candidate_files:
|
||||||
|
if len(seed_nodes) >= max_expand:
|
||||||
|
break
|
||||||
|
suffix = Path(file_path).suffix.lower()
|
||||||
|
if suffix not in {".py", ".ts", ".tsx", ".js", ".jsx"}:
|
||||||
|
continue
|
||||||
|
|
||||||
|
try:
|
||||||
|
lines = Path(file_path).read_text(encoding="utf-8", errors="ignore").splitlines()
|
||||||
|
except Exception:
|
||||||
|
continue
|
||||||
|
|
||||||
|
for i, line in enumerate(lines):
|
||||||
|
kind = None
|
||||||
|
if suffix == ".py":
|
||||||
|
if py_class.search(line):
|
||||||
|
kind = "class"
|
||||||
|
elif py_def.search(line):
|
||||||
|
kind = "function"
|
||||||
|
else:
|
||||||
|
if ts_class.search(line):
|
||||||
|
kind = "class"
|
||||||
|
elif ts_func.search(line):
|
||||||
|
kind = "function"
|
||||||
|
|
||||||
|
if not kind:
|
||||||
|
continue
|
||||||
|
|
||||||
|
start_line = i + 1
|
||||||
|
idx = line.find(term)
|
||||||
|
if idx >= 0:
|
||||||
|
start_character = idx + 1
|
||||||
|
else:
|
||||||
|
stripped = line.lstrip()
|
||||||
|
start_character = (len(line) - len(stripped)) + 1 if stripped else 1
|
||||||
|
|
||||||
|
node_id = f"{file_path}:{term}:{start_line}"
|
||||||
|
if node_id in seed_ids:
|
||||||
|
break
|
||||||
|
|
||||||
|
seed_ids.add(node_id)
|
||||||
|
seed_nodes.append(
|
||||||
|
CodeSymbolNode(
|
||||||
|
id=node_id,
|
||||||
|
name=term,
|
||||||
|
kind=kind,
|
||||||
|
file_path=file_path,
|
||||||
|
range=Range(
|
||||||
|
start_line=start_line,
|
||||||
|
start_character=start_character,
|
||||||
|
end_line=start_line,
|
||||||
|
end_character=start_character,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
break
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
for seed in selected_results:
|
||||||
|
if len(seed_nodes) >= max_expand:
|
||||||
|
break
|
||||||
if not seed.path:
|
if not seed.path:
|
||||||
continue
|
continue
|
||||||
name = seed.symbol_name or Path(seed.path).stem
|
name = seed.symbol_name or Path(seed.path).stem
|
||||||
@@ -1249,14 +1495,21 @@ class ChainSearchEngine:
|
|||||||
end_line = int(seed.end_line or start_line)
|
end_line = int(seed.end_line or start_line)
|
||||||
start_character = 1
|
start_character = 1
|
||||||
try:
|
try:
|
||||||
if seed.symbol_name and start_line >= 1:
|
if start_line >= 1:
|
||||||
line_text = Path(seed.path).read_text(encoding="utf-8", errors="ignore").splitlines()[start_line - 1]
|
line_text = Path(seed.path).read_text(encoding="utf-8", errors="ignore").splitlines()[start_line - 1]
|
||||||
idx = line_text.find(seed.symbol_name)
|
if seed.symbol_name:
|
||||||
if idx >= 0:
|
idx = line_text.find(seed.symbol_name)
|
||||||
start_character = idx + 1 # 1-based for StandaloneLspManager
|
if idx >= 0:
|
||||||
|
start_character = idx + 1 # 1-based for StandaloneLspManager
|
||||||
|
else:
|
||||||
|
stripped = line_text.lstrip()
|
||||||
|
if stripped:
|
||||||
|
start_character = (len(line_text) - len(stripped)) + 1
|
||||||
except Exception:
|
except Exception:
|
||||||
start_character = 1
|
start_character = 1
|
||||||
node_id = f"{seed.path}:{name}:{start_line}"
|
node_id = f"{seed.path}:{name}:{start_line}"
|
||||||
|
if node_id in seed_ids:
|
||||||
|
continue
|
||||||
seed_ids.add(node_id)
|
seed_ids.add(node_id)
|
||||||
seed_nodes.append(
|
seed_nodes.append(
|
||||||
CodeSymbolNode(
|
CodeSymbolNode(
|
||||||
@@ -1268,7 +1521,7 @@ class ChainSearchEngine:
|
|||||||
start_line=start_line,
|
start_line=start_line,
|
||||||
start_character=start_character,
|
start_character=start_character,
|
||||||
end_line=end_line,
|
end_line=end_line,
|
||||||
end_character=1,
|
end_character=start_character if end_line == start_line else 1,
|
||||||
),
|
),
|
||||||
raw_code=seed.content or "",
|
raw_code=seed.content or "",
|
||||||
docstring=seed.excerpt or "",
|
docstring=seed.excerpt or "",
|
||||||
@@ -1279,7 +1532,11 @@ class ChainSearchEngine:
|
|||||||
return coarse_results
|
return coarse_results
|
||||||
|
|
||||||
async def expand_graph():
|
async def expand_graph():
|
||||||
async with LspBridge(workspace_root=str(workspace_root), timeout=timeout_s) as bridge:
|
async with LspBridge(
|
||||||
|
workspace_root=str(workspace_root),
|
||||||
|
config_file=str(lsp_config_file) if lsp_config_file else None,
|
||||||
|
timeout=timeout_s,
|
||||||
|
) as bridge:
|
||||||
# Warm up analysis: open seed docs and wait a bit so references/call hierarchy are populated.
|
# Warm up analysis: open seed docs and wait a bit so references/call hierarchy are populated.
|
||||||
if warmup_s > 0:
|
if warmup_s > 0:
|
||||||
for seed in seed_nodes[:3]:
|
for seed in seed_nodes[:3]:
|
||||||
@@ -1288,12 +1545,14 @@ class ChainSearchEngine:
|
|||||||
except Exception:
|
except Exception:
|
||||||
continue
|
continue
|
||||||
try:
|
try:
|
||||||
await asyncio.sleep(min(warmup_s, max(0.0, timeout_s - 0.5)))
|
warmup_budget = min(warmup_s, max(0.0, timeout_s * 0.1))
|
||||||
|
await asyncio.sleep(min(warmup_budget, max(0.0, timeout_s - 0.5)))
|
||||||
except Exception:
|
except Exception:
|
||||||
pass
|
pass
|
||||||
builder = LspGraphBuilder(
|
builder = LspGraphBuilder(
|
||||||
max_depth=max_depth,
|
max_depth=max_depth,
|
||||||
max_nodes=max_nodes,
|
max_nodes=max_nodes,
|
||||||
|
max_concurrent=max(1, max_concurrent),
|
||||||
resolve_symbols=resolve_symbols,
|
resolve_symbols=resolve_symbols,
|
||||||
)
|
)
|
||||||
return await builder.build_from_seeds(seed_nodes, bridge)
|
return await builder.build_from_seeds(seed_nodes, bridge)
|
||||||
@@ -1314,9 +1573,21 @@ class ChainSearchEngine:
|
|||||||
else:
|
else:
|
||||||
graph = run_coro_blocking()
|
graph = run_coro_blocking()
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
self.logger.debug("Stage 2 (realtime) expansion failed: %s", exc)
|
self.logger.debug("Stage 2 (realtime) expansion failed: %r", exc)
|
||||||
return coarse_results
|
return coarse_results
|
||||||
|
|
||||||
|
try:
|
||||||
|
node_count = len(getattr(graph, "nodes", {}) or {})
|
||||||
|
edge_count = len(getattr(graph, "edges", []) or [])
|
||||||
|
except Exception:
|
||||||
|
node_count, edge_count = 0, 0
|
||||||
|
self.logger.debug(
|
||||||
|
"Stage 2 (realtime) graph built: seeds=%d nodes=%d edges=%d",
|
||||||
|
len(seed_nodes),
|
||||||
|
node_count,
|
||||||
|
edge_count,
|
||||||
|
)
|
||||||
|
|
||||||
related_results: List[SearchResult] = []
|
related_results: List[SearchResult] = []
|
||||||
for node_id, node in getattr(graph, "nodes", {}).items():
|
for node_id, node in getattr(graph, "nodes", {}).items():
|
||||||
if node_id in seed_ids or getattr(node, "id", "") in seed_ids:
|
if node_id in seed_ids or getattr(node, "id", "") in seed_ids:
|
||||||
@@ -1395,6 +1666,21 @@ class ChainSearchEngine:
|
|||||||
|
|
||||||
return start
|
return start
|
||||||
|
|
||||||
|
def _find_lsp_config_file(self, start_path: Path) -> Optional[Path]:
|
||||||
|
"""Find a lsp-servers.json by walking up from start_path."""
|
||||||
|
start = Path(start_path).resolve()
|
||||||
|
if start.is_file():
|
||||||
|
start = start.parent
|
||||||
|
|
||||||
|
for current in [start, *list(start.parents)]:
|
||||||
|
try:
|
||||||
|
candidate = current / "lsp-servers.json"
|
||||||
|
if candidate.is_file():
|
||||||
|
return candidate
|
||||||
|
except OSError:
|
||||||
|
continue
|
||||||
|
return None
|
||||||
|
|
||||||
def _stage3_cluster_prune(
|
def _stage3_cluster_prune(
|
||||||
self,
|
self,
|
||||||
expanded_results: List[SearchResult],
|
expanded_results: List[SearchResult],
|
||||||
@@ -290,7 +290,7 @@ class PathMapper:
         # Check if first part is a drive letter
         if len(parts[0]) == 1 and parts[0].isalpha():
             # D/path/to/dir → D:/path/to/dir
-            drive = f"{parts[0]}:"
+            drive = f"{parts[0]}:/"
             if len(parts) > 1:
                 return Path(drive) / Path(*parts[1:])
             return Path(drive)
|
|||||||
codex-lens/tests/test_path_mapper_windows_drive.py (new file, 19 lines)
@@ -0,0 +1,19 @@
from __future__ import annotations

import platform
from pathlib import Path

from codexlens.storage.path_mapper import PathMapper


def test_denormalize_path_windows_drive_is_absolute() -> None:
    if platform.system() != "Windows":
        return

    mapper = PathMapper(index_root=Path("C:/tmp/codexlens_indexes"))
    mapped = mapper.denormalize_path("D/Claude_dms3/codex-lens/src")

    assert mapped.is_absolute()
    assert str(mapped).lower().startswith("d:\\") or str(mapped).lower().startswith("d:/")
    assert mapped == Path("D:/Claude_dms3/codex-lens/src")
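The test guards itself with an early return on non-Windows hosts. An equivalent guard using a pytest skip marker would look like the following sketch (not part of the change, shown only as the usual alternative):

    import platform
    import pytest

    @pytest.mark.skipif(platform.system() != "Windows", reason="Windows drive semantics only")
    def test_denormalize_path_windows_drive_is_absolute() -> None:
        ...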
@@ -63,3 +63,69 @@ def test_stage1_binary_search_prefers_chunk_start_line(tmp_path: Path) -> None:
     finally:
         engine.close()
+
+
+def test_stage1_binary_search_dense_fallback(tmp_path: Path) -> None:
+    registry = RegistryStore(db_path=tmp_path / "registry.db")
+    registry.initialize()
+    mapper = PathMapper(index_root=tmp_path / "indexes")
+    engine = ChainSearchEngine(registry, mapper, config=Config(data_dir=tmp_path / "data"))
+
+    try:
+        index_root = tmp_path / "fake_index_root"
+        index_root.mkdir(parents=True, exist_ok=True)
+        index_db = index_root / "_index.db"
+        index_db.write_text("", encoding="utf-8")
+        (index_root / VECTORS_META_DB_NAME).write_text("", encoding="utf-8")
+
+        class _DummyEmbedder:
+            def embed_to_numpy(self, texts):
+                _ = texts
+                # Only dim matters for ANNIndex initialization
+                return [[0.0, 1.0, 2.0]]
+
+        class _DummyANNIndex:
+            def __init__(self, *args, **kwargs) -> None:
+                pass
+
+            def load(self) -> bool:
+                return True
+
+            def count(self) -> int:
+                return 1
+
+            def search(self, query_vec, top_k: int = 10):
+                _ = query_vec
+                _ = top_k
+                return [123], [0.2]
+
+        dummy_meta_store = MagicMock()
+        dummy_meta_store.get_chunks_by_ids.return_value = [
+            {
+                "chunk_id": 123,
+                "file_path": str(tmp_path / "b.py"),
+                "content": "def b():\n return 2\n",
+                "start_line": 20,
+                "end_line": 22,
+                "metadata": {},
+                "category": "code",
+            }
+        ]
+
+        with patch.object(engine, "_get_centralized_binary_searcher", return_value=None):
+            with patch("codexlens.search.chain_search.VectorMetadataStore", return_value=dummy_meta_store):
+                with patch("codexlens.semantic.embedder.Embedder", return_value=_DummyEmbedder()):
+                    with patch("codexlens.semantic.ann_index.ANNIndex", _DummyANNIndex):
+                        coarse_results, returned_root = engine._stage1_binary_search(
+                            "b",
+                            [index_db],
+                            coarse_k=1,
+                            stats=SearchStats(),
+                        )
+
+        assert returned_root == index_root
+        assert len(coarse_results) == 1
+        assert coarse_results[0].start_line == 20
+        assert coarse_results[0].end_line == 22
+        assert coarse_results[0].score == 0.8
+    finally:
+        engine.close()
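Because the new dense-fallback test stubs out the binary searcher, metadata store, embedder, and ANN index, it exercises only the fallback wiring and needs no real model or vectors; assuming a standard pytest setup, it can be selected on its own with pytest -k stage1_binary_search_dense_fallback. The expected score of 0.8 appears consistent with the stubbed ANN distance of 0.2 under a 1 - distance conversion, though that mapping lives in the engine rather than in the test.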
test-mcp-tools.mjs (new file, 182 lines)
@@ -0,0 +1,182 @@
#!/usr/bin/env node
/**
 * MCP Tools Test Script
 * Tests the modified read_file and edit_file tools with parameter validation
 */

import { executeTool } from './ccw/dist/tools/index.js';

console.log('🧪 MCP Tools Test Suite\n');
console.log('Testing modified parameters:\n');

let passed = 0;
let failed = 0;

// Test helper
async function test(name, testFn) {
  try {
    await testFn();
    console.log(`✅ ${name}`);
    passed++;
  } catch (error) {
    console.log(`❌ ${name}`);
    console.error(` Error: ${error.message}`);
    failed++;
  }
}

// Test 1: read_file - single file with offset/limit (should succeed)
await test('read_file: single file + offset/limit (valid)', async () => {
  const result = await executeTool('read_file', {
    paths: 'README.md',
    offset: 0,
    limit: 5
  });

  if (!result.success) {
    throw new Error(result.error);
  }
  console.log(` → Read ${result.result.files.length} file, ${result.result.message}`);
});

// Test 2: read_file - multiple files with offset/limit (should FAIL with new validation)
await test('read_file: multiple files + offset/limit (validation error)', async () => {
  const result = await executeTool('read_file', {
    paths: ['README.md', 'package.json'],
    offset: 0,
    limit: 5
  });

  if (result.success) {
    throw new Error('Expected validation error but succeeded');
  }

  if (!result.error.includes('offset/limit')) {
    throw new Error(`Expected error message about offset/limit, got: ${result.error}`);
  }
  console.log(` → Got expected error: ${result.error.substring(0, 60)}...`);
});

// Test 3: read_file - multiple files without offset/limit (should succeed)
await test('read_file: multiple files without offset/limit (valid)', async () => {
  const result = await executeTool('read_file', {
    paths: ['README.md', 'package.json']
  });

  if (!result.success) {
    throw new Error(result.error);
  }
  console.log(` → Read ${result.result.files.length} files`);
});

// Test 4: edit_file - update mode with oldText/newText (should succeed)
await test('edit_file: update mode + oldText/newText (valid)', async () => {
  const result = await executeTool('edit_file', {
    path: 'README.md',
    mode: 'update',
    oldText: 'old content',
    newText: 'new content',
    dryRun: true
  });

  if (!result.success) {
    throw new Error(result.error);
  }
  console.log(` → ${result.result.message}`);
});

// Test 5: edit_file - update mode with edits (should succeed)
await test('edit_file: update mode + edits (valid)', async () => {
  const result = await executeTool('edit_file', {
    path: 'README.md',
    mode: 'update',
    edits: [{ oldText: 'old', newText: 'new' }],
    dryRun: true
  });

  if (!result.success) {
    throw new Error(result.error);
  }
  console.log(` → ${result.result.message}`);
});

// Test 6: edit_file - update mode with BOTH oldText/newText AND edits (should FAIL)
await test('edit_file: update mode + both oldText/newText AND edits (validation error)', async () => {
  const result = await executeTool('edit_file', {
    path: 'README.md',
    mode: 'update',
    oldText: 'old',
    newText: 'new',
    edits: [{ oldText: 'old2', newText: 'new2' }],
    dryRun: true
  });

  if (result.success) {
    throw new Error('Expected validation error but succeeded');
  }

  if (!result.error.includes('oldText/newText') && !result.error.includes('edits')) {
    throw new Error(`Expected error about oldText/newText or edits, got: ${result.error}`);
  }
  console.log(` → Got expected error: ${result.error.substring(0, 80)}...`);
});

// Test 7: edit_file - update mode without proper parameters (should FAIL - no oldText/newText or edits)
await test('edit_file: update mode without proper parameters (validation error)', async () => {
  const result = await executeTool('edit_file', {
    path: 'README.md',
    mode: 'update'
    // Missing both oldText/newText and edits
  });

  if (result.success) {
    throw new Error('Expected validation error but succeeded');
  }
  console.log(` → Got expected error: ${result.error.substring(0, 80)}...`);
});

// Test 8: edit_file - line mode with line mode parameters (should succeed)
await test('edit_file: line mode + line mode parameters (valid)', async () => {
  const result = await executeTool('edit_file', {
    path: 'README.md',
    mode: 'line',
    operation: 'insert_after',
    line: 1,
    text: 'new line'
  });

  if (!result.success) {
    throw new Error(result.error);
  }
  console.log(` → ${result.result.message}`);
});

// Test 9: edit_file - line mode missing required text (should FAIL)
await test('edit_file: line mode + insert without text (validation error)', async () => {
  const result = await executeTool('edit_file', {
    path: 'README.md',
    mode: 'line',
    operation: 'insert_after',
    line: 1
    // missing 'text' parameter
  });

  if (result.success) {
    throw new Error('Expected validation error but succeeded');
  }
  console.log(` → Got expected error: ${result.error.substring(0, 80)}...`);
});

// Summary
console.log(`\n📊 Test Results:`);
console.log(` ✅ Passed: ${passed}`);
console.log(` ❌ Failed: ${failed}`);
console.log(` 📈 Success Rate: ${((passed / (passed + failed)) * 100).toFixed(1)}%`);

if (failed === 0) {
  console.log('\n🎉 All tests passed!');
  process.exit(0);
} else {
  console.log(`\n⚠️ ${failed} test(s) failed`);
  process.exit(1);
}
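The script imports the compiled tool registry from ./ccw/dist/tools/index.js, so it presumably requires the ccw package to be built first; after that it is run directly with Node from the repository root (node test-mcp-tools.mjs) and exits non-zero if any check fails.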