feat(mcp): add read_file tool and simplify edit/write returns

- edit_file: truncate diff to 15 lines, compact result format
- write_file: return only path/bytes/message
- read_file: new tool with multi-file, directory, regex support (usage sketch below)
  - paths: single file, array, or directory
  - pattern: glob filter (*.ts)
  - contentPattern: regex content search
  - maxDepth, maxFiles, includeContent options
- Update tool-strategy.md documentation
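
A rough usage sketch for the new tool (parameter names follow the read_file
schema added in this commit; the exact MCP invocation and import path depend
on the client):

```ts
// Illustrative calls against the exported handler in ccw/src/tools/read-file.ts
import { handler as readFile } from './read-file.js';

await readFile({ paths: 'src/index.ts' });                          // single file
await readFile({ paths: ['a.ts', 'b.ts'], includeContent: false }); // multiple files, metadata only
await readFile({ paths: 'src/', pattern: '*.ts', maxDepth: 2 });    // directory with glob filter
await readFile({ paths: 'src/', contentPattern: 'TODO|FIXME' });    // regex content search
```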

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
Author: catlog22
Date: 2025-12-13 17:28:03 +08:00
Parent: 029384c427
Commit: 675aff26ff
17 changed files with 1108 additions and 248 deletions


@@ -40,6 +40,7 @@ const ParamsSchema = z.object({
tool: z.enum(['gemini', 'qwen', 'codex']),
prompt: z.string().min(1, 'Prompt is required'),
mode: z.enum(['analysis', 'write', 'auto']).default('analysis'),
format: z.enum(['plain', 'yaml', 'json']).default('plain'), // Multi-turn prompt concatenation format
model: z.string().optional(),
cd: z.string().optional(),
includeDirs: z.string().optional(),
@@ -50,6 +51,9 @@ const ParamsSchema = z.object({
type Params = z.infer<typeof ParamsSchema>;
// Prompt concatenation format types
type PromptFormat = 'plain' | 'yaml' | 'json';
interface ToolAvailability {
available: boolean;
path: string | null;
@@ -247,21 +251,6 @@ function ensureHistoryDir(baseDir: string): string {
return historyDir;
}
/**
* Load history index
*/
function loadHistoryIndex(historyDir: string): HistoryIndex {
const indexPath = join(historyDir, 'index.json');
if (existsSync(indexPath)) {
try {
return JSON.parse(readFileSync(indexPath, 'utf8'));
} catch {
return { version: 1, total_executions: 0, executions: [] };
}
}
return { version: 1, total_executions: 0, executions: [] };
}
/**
* Save conversation to SQLite
*/
@@ -384,29 +373,25 @@ function mergeConversations(conversations: ConversationRecord[]): MergeResult {
/**
* Build prompt from merged conversations
*/
function buildMergedPrompt(mergeResult: MergeResult, newPrompt: string): string {
const parts: string[] = [];
function buildMergedPrompt(
mergeResult: MergeResult,
newPrompt: string,
format: PromptFormat = 'plain'
): string {
const concatenator = createPromptConcatenator({ format });
parts.push('=== MERGED CONVERSATION HISTORY ===');
parts.push(`(From ${mergeResult.sourceConversations.length} conversations: ${mergeResult.sourceConversations.map(c => c.id).join(', ')})`);
parts.push('');
// Set metadata for merged conversations
concatenator.setMetadata(
'merged_sources',
mergeResult.sourceConversations.map(c => c.id).join(', ')
);
// Add all merged turns with source tracking
for (const turn of mergeResult.mergedTurns) {
parts.push(`--- Turn ${turn.turn} [${turn.source_id}] ---`);
parts.push('USER:');
parts.push(turn.prompt);
parts.push('');
parts.push('ASSISTANT:');
parts.push(turn.output.stdout || '[No output recorded]');
parts.push('');
concatenator.addFromConversationTurn(turn, turn.source_id);
}
parts.push('=== NEW REQUEST ===');
parts.push('');
parts.push(newPrompt);
return parts.join('\n');
return concatenator.build(newPrompt);
}
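// The concatenator used above is defined later in this file (the "Prompt
// Concatenation System" section); the shape inferred from these calls is roughly:
//   createPromptConcatenator({ format }): PromptConcatenator
//   concatenator.setMetadata(key: string, value: string): void
//   concatenator.addFromConversationTurn(turn, sourceId): void
//   concatenator.build(newPrompt: string): string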
/**
@@ -421,7 +406,7 @@ async function executeCliTool(
throw new Error(`Invalid params: ${parsed.error.message}`);
}
const { tool, prompt, mode, model, cd, includeDirs, timeout, resume, id: customId } = parsed.data;
const { tool, prompt, mode, format, model, cd, includeDirs, timeout, resume, id: customId } = parsed.data;
// Determine working directory early (needed for conversation lookup)
const workingDir = cd || process.cwd();
@@ -505,11 +490,11 @@ async function executeCliTool(
// For append: use existingConversation (from target ID)
let finalPrompt = prompt;
if (mergeResult && mergeResult.mergedTurns.length > 0) {
finalPrompt = buildMergedPrompt(mergeResult, prompt);
finalPrompt = buildMergedPrompt(mergeResult, prompt, format);
} else {
const conversationForContext = contextConversation || existingConversation;
if (conversationForContext && conversationForContext.turns.length > 0) {
finalPrompt = buildMultiTurnPrompt(conversationForContext, prompt);
finalPrompt = buildMultiTurnPrompt(conversationForContext, prompt, format);
}
}
@@ -845,9 +830,9 @@ function findCliHistoryDirs(baseDir: string, maxDepth: number = 3): string[] {
function scanDir(dir: string, depth: number) {
if (depth > maxDepth) return;
// Check if this directory has CLI history
// Check if this directory has CLI history (SQLite database)
const historyDir = join(dir, '.workflow', '.cli-history');
if (existsSync(join(historyDir, 'index.json'))) {
if (existsSync(join(historyDir, 'history.db'))) {
historyDirs.push(historyDir);
}
@@ -1047,11 +1032,6 @@ export async function getCliToolsStatus(): Promise<Record<string, ToolAvailabili
// ========== Prompt Concatenation System ==========
/**
* Supported prompt concatenation formats
*/
type PromptFormat = 'plain' | 'yaml' | 'json';
/**
* Turn data structure for concatenation
*/
@@ -1477,7 +1457,7 @@ export { executeCliTool, checkToolAvailability };
export { PromptConcatenator, createPromptConcatenator, buildPrompt, buildMultiTurnPrompt };
// Note: Async storage functions (getExecutionHistoryAsync, deleteExecutionAsync,
// batchDeleteExecutionsAsync, setStorageBackend) are exported at declaration site
// batchDeleteExecutionsAsync) are exported at declaration site - SQLite storage only
// Export tool definition (for legacy imports) - This allows direct calls to execute with onOutput
export const cliExecutorTool = {


@@ -52,6 +52,9 @@ interface UpdateModeResult {
message: string;
}
// Max lines to show in diff preview
const MAX_DIFF_LINES = 15;
interface LineModeResult {
content: string;
modified: boolean;
@@ -61,7 +64,7 @@ interface LineModeResult {
message: string;
}
type EditResult = Omit<UpdateModeResult | LineModeResult, 'content'>;
// Internal type for mode results (content excluded in final output)
/**
* Resolve file path and read content
@@ -485,8 +488,30 @@ Options: dryRun=true (preview diff), replaceAll=true (replace all occurrences)`,
},
};
/**
* Truncate diff to max lines with indicator
*/
function truncateDiff(diff: string, maxLines: number): string {
if (!diff) return '';
const lines = diff.split('\n');
if (lines.length <= maxLines) return diff;
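// e.g. a 40-line diff with maxLines=15 keeps the first 15 lines and appends "... (+25 more lines)"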
return lines.slice(0, maxLines).join('\n') + `\n... (+${lines.length - maxLines} more lines)`;
}
/**
* Build compact result for output
*/
interface CompactEditResult {
path: string;
modified: boolean;
message: string;
replacements?: number;
diff?: string;
dryRun?: boolean;
}
// Handler function
export async function handler(params: Record<string, unknown>): Promise<ToolResult<EditResult>> {
export async function handler(params: Record<string, unknown>): Promise<ToolResult<CompactEditResult>> {
const parsed = ParamsSchema.safeParse(params);
if (!parsed.success) {
return { success: false, error: `Invalid params: ${parsed.error.message}` };
@@ -514,9 +539,24 @@ export async function handler(params: Record<string, unknown>): Promise<ToolResu
writeFile(resolvedPath, result.content);
}
// Remove content from result
const { content: _, ...output } = result;
return { success: true, result: output as EditResult };
// Build compact result
const compactResult: CompactEditResult = {
path: resolvedPath,
modified: result.modified,
message: result.message,
};
// Add mode-specific fields
if ('replacements' in result) {
compactResult.replacements = result.replacements;
compactResult.dryRun = result.dryRun;
// Truncate diff for compact output
if (result.diff) {
compactResult.diff = truncateDiff(result.diff, MAX_DIFF_LINES);
}
}
return { success: true, result: compactResult };
} catch (error) {
return { success: false, error: (error as Error).message };
}


@@ -19,6 +19,7 @@ import * as sessionManagerMod from './session-manager.js';
import * as cliExecutorMod from './cli-executor.js';
import * as smartSearchMod from './smart-search.js';
import * as codexLensMod from './codex-lens.js';
import * as readFileMod from './read-file.js';
// Import legacy JS tools
import { uiGeneratePreviewTool } from './ui-generate-preview.js';
@@ -297,6 +298,7 @@ registerTool(toLegacyTool(sessionManagerMod));
registerTool(toLegacyTool(cliExecutorMod));
registerTool(toLegacyTool(smartSearchMod));
registerTool(toLegacyTool(codexLensMod));
registerTool(toLegacyTool(readFileMod));
// Register legacy JS tools
registerTool(uiGeneratePreviewTool);

ccw/src/tools/read-file.ts (new file, 325 lines)

@@ -0,0 +1,325 @@
/**
* Read File Tool - Read files with multi-file, directory, and regex support
*
* Features:
* - Read single or multiple files
* - Read all files in a directory (with depth control)
* - Filter files by glob/regex pattern
* - Content search with regex
* - Compact output format
*/
import { z } from 'zod';
import type { ToolSchema, ToolResult } from '../types/tool.js';
import { readFileSync, readdirSync, statSync, existsSync } from 'fs';
import { resolve, isAbsolute, join, relative, extname } from 'path';
// Max content per file (truncate if larger)
const MAX_CONTENT_LENGTH = 5000;
// Max files to return
const MAX_FILES = 50;
// Max total content length
const MAX_TOTAL_CONTENT = 50000;
// Define Zod schema for validation
const ParamsSchema = z.object({
paths: z.union([z.string(), z.array(z.string())]).describe('File path(s) or directory'),
pattern: z.string().optional().describe('Glob pattern to filter files (e.g., "*.ts", "**/*.js")'),
contentPattern: z.string().optional().describe('Regex to search within file content'),
maxDepth: z.number().default(3).describe('Max directory depth to traverse'),
includeContent: z.boolean().default(true).describe('Include file content in result'),
maxFiles: z.number().default(MAX_FILES).describe('Max number of files to return'),
});
type Params = z.infer<typeof ParamsSchema>;
interface FileEntry {
path: string;
size: number;
content?: string;
truncated?: boolean;
matches?: string[];
}
interface ReadResult {
files: FileEntry[];
totalFiles: number;
message: string;
}
// Common binary extensions to skip
const BINARY_EXTENSIONS = new Set([
'.png', '.jpg', '.jpeg', '.gif', '.bmp', '.ico', '.webp', '.svg',
'.pdf', '.doc', '.docx', '.xls', '.xlsx', '.ppt', '.pptx',
'.zip', '.tar', '.gz', '.rar', '.7z',
'.exe', '.dll', '.so', '.dylib',
'.mp3', '.mp4', '.wav', '.avi', '.mov',
'.woff', '.woff2', '.ttf', '.eot', '.otf',
'.pyc', '.class', '.o', '.obj',
]);
/**
* Check if file is likely binary
*/
function isBinaryFile(filePath: string): boolean {
const ext = extname(filePath).toLowerCase();
return BINARY_EXTENSIONS.has(ext);
}
/**
* Convert glob pattern to regex
*/
function globToRegex(pattern: string): RegExp {
const escaped = pattern
.replace(/[.+^${}()|[\]\\]/g, '\\$&')
.replace(/\*/g, '.*')
.replace(/\?/g, '.');
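// e.g. '*.ts' becomes /^.*\.ts$/i, so 'utils.ts' matches but 'utils.tsx' does not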
return new RegExp(`^${escaped}$`, 'i');
}
/**
* Check if filename matches glob pattern
*/
function matchesPattern(filename: string, pattern: string): boolean {
const regex = globToRegex(pattern);
return regex.test(filename);
}
/**
* Recursively collect files from directory
*/
function collectFiles(
dir: string,
pattern: string | undefined,
maxDepth: number,
currentDepth: number = 0
): string[] {
if (currentDepth > maxDepth) return [];
const files: string[] = [];
try {
const entries = readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
// Skip hidden files/dirs and node_modules
if (entry.name.startsWith('.') || entry.name === 'node_modules') continue;
const fullPath = join(dir, entry.name);
if (entry.isDirectory()) {
files.push(...collectFiles(fullPath, pattern, maxDepth, currentDepth + 1));
} else if (entry.isFile()) {
if (!pattern || matchesPattern(entry.name, pattern)) {
files.push(fullPath);
}
}
}
} catch {
// Skip directories we can't read
}
return files;
}
/**
* Read file content with truncation
*/
function readFileContent(filePath: string, maxLength: number): { content: string; truncated: boolean } {
if (isBinaryFile(filePath)) {
return { content: '[Binary file]', truncated: false };
}
try {
const content = readFileSync(filePath, 'utf8');
if (content.length > maxLength) {
return {
content: content.substring(0, maxLength) + `\n... (+${content.length - maxLength} chars)`,
truncated: true
};
}
return { content, truncated: false };
} catch (error) {
return { content: `[Error: ${(error as Error).message}]`, truncated: false };
}
}
/**
* Find regex matches in content
*/
function findMatches(content: string, pattern: string): string[] {
try {
const regex = new RegExp(pattern, 'gm');
const matches: string[] = [];
let match;
while ((match = regex.exec(content)) !== null && matches.length < 10) {
// Get line containing match
const lineStart = content.lastIndexOf('\n', match.index) + 1;
const lineEnd = content.indexOf('\n', match.index);
const line = content.substring(lineStart, lineEnd === -1 ? undefined : lineEnd).trim();
matches.push(line.substring(0, 200)); // Truncate long lines
}
return matches;
} catch {
return [];
}
}
// Tool schema for MCP
export const schema: ToolSchema = {
name: 'read_file',
description: `Read files with multi-file, directory, and regex support.
Usage:
read_file(paths="file.ts") # Single file
read_file(paths=["a.ts", "b.ts"]) # Multiple files
read_file(paths="src/", pattern="*.ts") # Directory with pattern
read_file(paths="src/", contentPattern="TODO") # Search content
Returns compact file list with optional content.`,
inputSchema: {
type: 'object',
properties: {
paths: {
oneOf: [
{ type: 'string', description: 'Single file or directory path' },
{ type: 'array', items: { type: 'string' }, description: 'Array of file paths' }
],
description: 'File path(s) or directory to read',
},
pattern: {
type: 'string',
description: 'Glob pattern to filter files (e.g., "*.ts", "*.{js,ts}")',
},
contentPattern: {
type: 'string',
description: 'Regex pattern to search within file content',
},
maxDepth: {
type: 'number',
description: 'Max directory depth to traverse (default: 3)',
default: 3,
},
includeContent: {
type: 'boolean',
description: 'Include file content in result (default: true)',
default: true,
},
maxFiles: {
type: 'number',
description: `Max number of files to return (default: ${MAX_FILES})`,
default: MAX_FILES,
},
},
required: ['paths'],
},
};
// Handler function
export async function handler(params: Record<string, unknown>): Promise<ToolResult<ReadResult>> {
const parsed = ParamsSchema.safeParse(params);
if (!parsed.success) {
return { success: false, error: `Invalid params: ${parsed.error.message}` };
}
const {
paths,
pattern,
contentPattern,
maxDepth,
includeContent,
maxFiles,
} = parsed.data;
const cwd = process.cwd();
// Normalize paths to array
const inputPaths = Array.isArray(paths) ? paths : [paths];
// Collect all files to read
const allFiles: string[] = [];
for (const inputPath of inputPaths) {
const resolvedPath = isAbsolute(inputPath) ? inputPath : resolve(cwd, inputPath);
if (!existsSync(resolvedPath)) {
continue; // Skip non-existent paths
}
const stat = statSync(resolvedPath);
if (stat.isDirectory()) {
// Collect files from directory
const dirFiles = collectFiles(resolvedPath, pattern, maxDepth);
allFiles.push(...dirFiles);
} else if (stat.isFile()) {
// Add single file (check pattern if provided)
if (!pattern || matchesPattern(relative(cwd, resolvedPath), pattern)) {
allFiles.push(resolvedPath);
}
}
}
// Limit files
const limitedFiles = allFiles.slice(0, maxFiles);
const totalFiles = allFiles.length;
// Process files
const files: FileEntry[] = [];
let totalContent = 0;
for (const filePath of limitedFiles) {
if (totalContent >= MAX_TOTAL_CONTENT) break;
const stat = statSync(filePath);
const entry: FileEntry = {
path: relative(cwd, filePath) || filePath,
size: stat.size,
};
if (includeContent) {
const remainingSpace = MAX_TOTAL_CONTENT - totalContent;
const maxLen = Math.min(MAX_CONTENT_LENGTH, remainingSpace);
const { content, truncated } = readFileContent(filePath, maxLen);
// If contentPattern provided, only include files with matches
if (contentPattern) {
const matches = findMatches(content, contentPattern);
if (matches.length > 0) {
entry.matches = matches;
entry.content = content;
entry.truncated = truncated;
totalContent += content.length;
} else {
continue; // Skip files without matches
}
} else {
entry.content = content;
entry.truncated = truncated;
totalContent += content.length;
}
}
files.push(entry);
}
// Build message
let message = `Read ${files.length} file(s)`;
if (totalFiles > maxFiles) {
message += ` (showing ${maxFiles} of ${totalFiles})`;
}
if (contentPattern) {
message += ` matching "${contentPattern}"`;
}
return {
success: true,
result: {
files,
totalFiles,
message,
},
};
}


@@ -24,12 +24,9 @@ const ParamsSchema = z.object({
type Params = z.infer<typeof ParamsSchema>;
// Compact result for output
interface WriteResult {
success: boolean;
path: string;
created: boolean;
overwritten: boolean;
backupPath: string | null;
bytes: number;
message: string;
}
@@ -153,19 +150,24 @@ export async function handler(params: Record<string, unknown>): Promise<ToolResu
// Write file
try {
writeFileSync(resolvedPath, content, { encoding });
const bytes = Buffer.byteLength(content, encoding);
// Build compact message
let message: string;
if (fileExists) {
message = backupPath
? `Overwrote (${bytes}B, backup: ${basename(backupPath)})`
: `Overwrote (${bytes}B)`;
} else {
message = `Created (${bytes}B)`;
}
return {
success: true,
result: {
success: true,
path: resolvedPath,
created: !fileExists,
overwritten: fileExists,
backupPath,
bytes: Buffer.byteLength(content, encoding),
message: fileExists
? `Successfully overwrote ${filePath}${backupPath ? ` (backup: ${backupPath})` : ''}`
: `Successfully created ${filePath}`,
bytes,
message,
},
};
} catch (error) {