Mirror of https://github.com/catlog22/Claude-Code-Workflow.git (synced 2026-02-11 02:33:51 +08:00)
feat: Enhance CLI components with icons and improve file editing capabilities
- Added icons to the CLI History and CLI Tools headers for better UI representation.
- Updated the CLI Status component to include tool-specific classes for styling.
- Refactored the CCW Install Panel to improve layout and functionality, including upgrade and uninstall buttons.
- Enhanced the edit-file tool with new features:
  - Support for creating parent directories when writing files.
  - dryRun mode for previewing changes without modifying files.
  - Unified diff output for changes made.
  - Multi-edit support in update mode.
- Introduced a new Smart Search Tool with multiple search modes (auto, exact, fuzzy, semantic, graph) and intent classification.
- Created a Write File Tool to handle file creation and overwriting with backup options.
@@ -3,10 +3,16 @@
|
||||
* Two complementary modes:
|
||||
* - update: Content-driven text replacement (AI primary use)
|
||||
* - line: Position-driven line operations (precise control)
|
||||
*
|
||||
* Features:
|
||||
* - dryRun mode for previewing changes
|
||||
* - Git-style diff output
|
||||
* - Multi-edit support in update mode
|
||||
* - Auto line-ending adaptation (CRLF/LF)
|
||||
*/
|
||||
|
||||
import { readFileSync, writeFileSync, existsSync } from 'fs';
|
||||
import { resolve, isAbsolute } from 'path';
|
||||
import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';
|
||||
import { resolve, isAbsolute, dirname } from 'path';
|
||||
|
||||
/**
|
||||
* Resolve file path and read content
|
||||
@@ -29,51 +35,218 @@ function readFile(filePath) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Write content to file
|
||||
* Write content to file with optional parent directory creation
|
||||
* @param {string} filePath - Path to file
|
||||
* @param {string} content - Content to write
|
||||
* @param {boolean} createDirs - Create parent directories if needed
|
||||
*/
|
||||
function writeFile(filePath, content) {
|
||||
function writeFile(filePath, content, createDirs = false) {
|
||||
try {
|
||||
if (createDirs) {
|
||||
const dir = dirname(filePath);
|
||||
if (!existsSync(dir)) {
|
||||
mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
}
|
||||
writeFileSync(filePath, content, 'utf8');
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to write file: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Normalize line endings to LF
|
||||
* @param {string} text - Input text
|
||||
* @returns {string} - Text with LF line endings
|
||||
*/
|
||||
function normalizeLineEndings(text) {
|
||||
return text.replace(/\r\n/g, '\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Create unified diff between two strings
|
||||
* @param {string} original - Original content
|
||||
* @param {string} modified - Modified content
|
||||
* @param {string} filePath - File path for diff header
|
||||
* @returns {string} - Unified diff string
|
||||
*/
|
||||
function createUnifiedDiff(original, modified, filePath) {
|
||||
const origLines = normalizeLineEndings(original).split('\n');
|
||||
const modLines = normalizeLineEndings(modified).split('\n');
|
||||
|
||||
const diffLines = [
|
||||
`--- a/${filePath}`,
|
||||
`+++ b/${filePath}`
|
||||
];
|
||||
|
||||
// Simple diff algorithm - find changes
|
||||
let i = 0, j = 0;
|
||||
let hunk = [];
|
||||
let hunkStart = 0;
|
||||
let origStart = 0;
|
||||
let modStart = 0;
|
||||
|
||||
while (i < origLines.length || j < modLines.length) {
|
||||
if (i < origLines.length && j < modLines.length && origLines[i] === modLines[j]) {
|
||||
// Context line
|
||||
if (hunk.length > 0) {
|
||||
hunk.push(` ${origLines[i]}`);
|
||||
}
|
||||
i++;
|
||||
j++;
|
||||
} else {
|
||||
// Start or continue hunk
|
||||
if (hunk.length === 0) {
|
||||
origStart = i + 1;
|
||||
modStart = j + 1;
|
||||
// Add context before
|
||||
const contextStart = Math.max(0, i - 3);
|
||||
for (let c = contextStart; c < i; c++) {
|
||||
hunk.push(` ${origLines[c]}`);
|
||||
}
|
||||
origStart = contextStart + 1;
|
||||
modStart = contextStart + 1;
|
||||
}
|
||||
|
||||
// Find where lines match again
|
||||
let foundMatch = false;
|
||||
for (let lookAhead = 1; lookAhead <= 10; lookAhead++) {
|
||||
if (i + lookAhead < origLines.length && j < modLines.length &&
|
||||
origLines[i + lookAhead] === modLines[j]) {
|
||||
// Remove lines from original
|
||||
for (let r = 0; r < lookAhead; r++) {
|
||||
hunk.push(`-${origLines[i + r]}`);
|
||||
}
|
||||
i += lookAhead;
|
||||
foundMatch = true;
|
||||
break;
|
||||
}
|
||||
if (j + lookAhead < modLines.length && i < origLines.length &&
|
||||
modLines[j + lookAhead] === origLines[i]) {
|
||||
// Add lines to modified
|
||||
for (let a = 0; a < lookAhead; a++) {
|
||||
hunk.push(`+${modLines[j + a]}`);
|
||||
}
|
||||
j += lookAhead;
|
||||
foundMatch = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!foundMatch) {
|
||||
// Replace line
|
||||
if (i < origLines.length) {
|
||||
hunk.push(`-${origLines[i]}`);
|
||||
i++;
|
||||
}
|
||||
if (j < modLines.length) {
|
||||
hunk.push(`+${modLines[j]}`);
|
||||
j++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Flush hunk if we've had 3 context lines after changes
|
||||
const lastChangeIdx = hunk.findLastIndex(l => l.startsWith('+') || l.startsWith('-'));
|
||||
if (lastChangeIdx >= 0 && hunk.length - lastChangeIdx > 3) {
|
||||
const origCount = hunk.filter(l => !l.startsWith('+')).length;
|
||||
const modCount = hunk.filter(l => !l.startsWith('-')).length;
|
||||
diffLines.push(`@@ -${origStart},${origCount} +${modStart},${modCount} @@`);
|
||||
diffLines.push(...hunk);
|
||||
hunk = [];
|
||||
}
|
||||
}
|
||||
|
||||
// Flush remaining hunk
|
||||
if (hunk.length > 0) {
|
||||
const origCount = hunk.filter(l => !l.startsWith('+')).length;
|
||||
const modCount = hunk.filter(l => !l.startsWith('-')).length;
|
||||
diffLines.push(`@@ -${origStart},${origCount} +${modStart},${modCount} @@`);
|
||||
diffLines.push(...hunk);
|
||||
}
|
||||
|
||||
return diffLines.length > 2 ? diffLines.join('\n') : '';
|
||||
}
|
||||
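For illustration (not part of the commit), a one-line change run through createUnifiedDiff above produces output of this shape; the path src/example.js is made up:

// Illustrative input/output for createUnifiedDiff.
const original = 'const a = 1;\nconst b = 2;\nconst c = 3;\n';
const modified = 'const a = 1;\nconst b = 20;\nconst c = 3;\n';
console.log(createUnifiedDiff(original, modified, 'src/example.js'));
// --- a/src/example.js
// +++ b/src/example.js
// @@ -1,4 +1,4 @@
//  const a = 1;
// -const b = 2;
// +const b = 20;
//  const c = 3;
// (followed by one blank context line)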
|
||||
/**
|
||||
* Mode: update - Simple text replacement
|
||||
* Auto-adapts line endings (CRLF/LF)
|
||||
* Supports multiple edits via 'edits' array
|
||||
*/
|
||||
function executeUpdateMode(content, params) {
|
||||
const { oldText, newText, replaceAll } = params;
|
||||
|
||||
if (!oldText) throw new Error('Parameter "oldText" is required for update mode');
|
||||
if (newText === undefined) throw new Error('Parameter "newText" is required for update mode');
|
||||
function executeUpdateMode(content, params, filePath) {
|
||||
const { oldText, newText, replaceAll, edits, dryRun = false } = params;
|
||||
|
||||
// Detect original line ending
|
||||
const hasCRLF = content.includes('\r\n');
|
||||
|
||||
// Normalize to LF for matching
|
||||
const normalize = (str) => str.replace(/\r\n/g, '\n');
|
||||
const normalizedContent = normalize(content);
|
||||
const normalizedOld = normalize(oldText);
|
||||
const normalizedNew = normalize(newText);
|
||||
const normalizedContent = normalizeLineEndings(content);
|
||||
const originalContent = normalizedContent;
|
||||
|
||||
let newContent = normalizedContent;
|
||||
let status = 'not found';
|
||||
let replacements = 0;
|
||||
const editResults = [];
|
||||
|
||||
if (newContent.includes(normalizedOld)) {
|
||||
if (replaceAll) {
|
||||
const parts = newContent.split(normalizedOld);
|
||||
replacements = parts.length - 1;
|
||||
newContent = parts.join(normalizedNew);
|
||||
status = 'replaced_all';
|
||||
} else {
|
||||
newContent = newContent.replace(normalizedOld, normalizedNew);
|
||||
// Support multiple edits via 'edits' array (like reference impl)
|
||||
const editOperations = edits || (oldText !== undefined ? [{ oldText, newText }] : []);
|
||||
|
||||
if (editOperations.length === 0) {
|
||||
throw new Error('Either "oldText/newText" or "edits" array is required for update mode');
|
||||
}
|
||||
|
||||
for (const edit of editOperations) {
|
||||
const normalizedOld = normalizeLineEndings(edit.oldText || '');
|
||||
const normalizedNew = normalizeLineEndings(edit.newText || '');
|
||||
|
||||
if (!normalizedOld) {
|
||||
editResults.push({ status: 'error', message: 'Empty oldText' });
|
||||
continue;
|
||||
}
|
||||
|
||||
if (newContent.includes(normalizedOld)) {
|
||||
if (replaceAll) {
|
||||
const parts = newContent.split(normalizedOld);
|
||||
const count = parts.length - 1;
|
||||
newContent = parts.join(normalizedNew);
|
||||
replacements += count;
|
||||
editResults.push({ status: 'replaced_all', count });
|
||||
} else {
|
||||
newContent = newContent.replace(normalizedOld, normalizedNew);
|
||||
replacements += 1;
|
||||
editResults.push({ status: 'replaced', count: 1 });
|
||||
}
|
||||
status = 'replaced';
|
||||
replacements = 1;
|
||||
} else {
|
||||
// Try fuzzy match (trimmed whitespace)
|
||||
const lines = newContent.split('\n');
|
||||
const oldLines = normalizedOld.split('\n');
|
||||
let matchFound = false;
|
||||
|
||||
for (let i = 0; i <= lines.length - oldLines.length; i++) {
|
||||
const potentialMatch = lines.slice(i, i + oldLines.length);
|
||||
const isMatch = oldLines.every((oldLine, j) =>
|
||||
oldLine.trim() === potentialMatch[j].trim()
|
||||
);
|
||||
|
||||
if (isMatch) {
|
||||
// Preserve indentation of first line
|
||||
const indent = lines[i].match(/^\s*/)?.[0] || '';
|
||||
const newLines = normalizedNew.split('\n').map((line, j) => {
|
||||
if (j === 0) return indent + line.trimStart();
|
||||
return line;
|
||||
});
|
||||
lines.splice(i, oldLines.length, ...newLines);
|
||||
newContent = lines.join('\n');
|
||||
replacements += 1;
|
||||
editResults.push({ status: 'replaced_fuzzy', count: 1 });
|
||||
matchFound = true;
|
||||
status = 'replaced';
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!matchFound) {
|
||||
editResults.push({ status: 'not_found', oldText: normalizedOld.substring(0, 50) });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -82,17 +255,23 @@ function executeUpdateMode(content, params) {
|
||||
newContent = newContent.replace(/\n/g, '\r\n');
|
||||
}
|
||||
|
||||
// Generate diff if content changed
|
||||
let diff = '';
|
||||
if (originalContent !== normalizeLineEndings(newContent)) {
|
||||
diff = createUnifiedDiff(originalContent, normalizeLineEndings(newContent), filePath);
|
||||
}
|
||||
|
||||
return {
|
||||
content: newContent,
|
||||
modified: content !== newContent,
|
||||
status,
|
||||
status: replacements > 0 ? 'replaced' : 'not found',
|
||||
replacements,
|
||||
message:
|
||||
status === 'replaced_all'
|
||||
? `Text replaced successfully (${replacements} occurrences)`
|
||||
: status === 'replaced'
|
||||
? 'Text replaced successfully'
|
||||
: 'oldText not found in file'
|
||||
editResults,
|
||||
diff,
|
||||
dryRun,
|
||||
message: replacements > 0
|
||||
? `${replacements} replacement(s) made${dryRun ? ' (dry run)' : ''}`
|
||||
: 'No matches found'
|
||||
};
|
||||
}
|
||||
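A quick sketch (not in the diff) of a call that exercises the edits array and the fuzzy whitespace path; the file content and edit texts are invented:

// Hypothetical executeUpdateMode call: the edit's indentation differs from the file,
// so it is applied through the fuzzy (trimmed-line) match path.
const result = executeUpdateMode(
  'function foo() {\n    return 1;\n}\n',
  {
    edits: [
      { oldText: 'function foo() {\n  return 1;\n}', newText: 'function foo(x) {\n    return x;\n}' }
    ],
    dryRun: true
  },
  'src/foo.js'
);
// result.editResults[0].status === 'replaced_fuzzy', result.replacements === 1,
// and result.diff holds the unified diff; execute() skips the write when dryRun is set.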
|
||||
@@ -179,7 +358,7 @@ function executeLineMode(content, params) {
|
||||
* Main execute function - routes to appropriate mode
|
||||
*/
|
||||
async function execute(params) {
|
||||
const { path: filePath, mode = 'update' } = params;
|
||||
const { path: filePath, mode = 'update', dryRun = false } = params;
|
||||
|
||||
if (!filePath) throw new Error('Parameter "path" is required');
|
||||
|
||||
@@ -188,7 +367,7 @@ async function execute(params) {
|
||||
let result;
|
||||
switch (mode) {
|
||||
case 'update':
|
||||
result = executeUpdateMode(content, params);
|
||||
result = executeUpdateMode(content, params, filePath);
|
||||
break;
|
||||
case 'line':
|
||||
result = executeLineMode(content, params);
|
||||
@@ -197,8 +376,8 @@ async function execute(params) {
|
||||
throw new Error(`Unknown mode: ${mode}. Valid modes: update, line`);
|
||||
}
|
||||
|
||||
// Write if modified
|
||||
if (result.modified) {
|
||||
// Write if modified and not dry run
|
||||
if (result.modified && !dryRun) {
|
||||
writeFile(resolvedPath, result.content);
|
||||
}
|
||||
|
||||
@@ -212,9 +391,14 @@ async function execute(params) {
|
||||
*/
|
||||
export const editFileTool = {
|
||||
name: 'edit_file',
|
||||
description: `Update file with two modes:
|
||||
- update: Replace oldText with newText (default)
|
||||
- line: Position-driven line operations`,
|
||||
description: `Edit file with two modes:
|
||||
- update: Replace oldText with newText (default). Supports multiple edits via 'edits' array.
|
||||
- line: Position-driven line operations (insert_before, insert_after, replace, delete)
|
||||
|
||||
Features:
|
||||
- dryRun: Preview changes without modifying file (returns diff)
|
||||
- Auto line ending adaptation (CRLF/LF)
|
||||
- Fuzzy matching for whitespace differences`,
|
||||
parameters: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
@@ -228,15 +412,32 @@ export const editFileTool = {
|
||||
description: 'Edit mode (default: update)',
|
||||
default: 'update'
|
||||
},
|
||||
dryRun: {
|
||||
type: 'boolean',
|
||||
description: 'Preview changes using git-style diff without modifying file (default: false)',
|
||||
default: false
|
||||
},
|
||||
// Update mode params
|
||||
oldText: {
|
||||
type: 'string',
|
||||
description: '[update mode] Text to find and replace'
|
||||
description: '[update mode] Text to find and replace (use oldText/newText OR edits array)'
|
||||
},
|
||||
newText: {
|
||||
type: 'string',
|
||||
description: '[update mode] Replacement text'
|
||||
},
|
||||
edits: {
|
||||
type: 'array',
|
||||
description: '[update mode] Array of {oldText, newText} for multiple replacements',
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
oldText: { type: 'string', description: 'Text to search for - must match exactly' },
|
||||
newText: { type: 'string', description: 'Text to replace with' }
|
||||
},
|
||||
required: ['oldText', 'newText']
|
||||
}
|
||||
},
|
||||
replaceAll: {
|
||||
type: 'boolean',
|
||||
description: '[update mode] Replace all occurrences of oldText (default: false)'
|
||||
|
||||
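For orientation (not in the diff), a call using the new parameters could look like this, assuming editFileTool exposes its execute function like the other tools in this commit; the path and texts are invented:

// Illustrative edit_file invocation: preview two replacements without writing.
const preview = await editFileTool.execute({
  path: 'src/server.js',
  mode: 'update',
  dryRun: true,
  edits: [
    { oldText: 'const PORT = 3000;', newText: 'const PORT = 8080;' },
    { oldText: 'app.listen(3000)', newText: 'app.listen(PORT)' }
  ]
});
// preview.diff should carry the git-style diff; the file stays untouched because dryRun is true.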
@@ -5,6 +5,7 @@
|
||||
|
||||
import http from 'http';
|
||||
import { editFileTool } from './edit-file.js';
|
||||
import { writeFileTool } from './write-file.js';
|
||||
import { getModulesByDepthTool } from './get-modules-by-depth.js';
|
||||
import { classifyFoldersTool } from './classify-folders.js';
|
||||
import { detectChangedModulesTool } from './detect-changed-modules.js';
|
||||
@@ -16,6 +17,7 @@ import { updateModuleClaudeTool } from './update-module-claude.js';
|
||||
import { convertTokensToCssTool } from './convert-tokens-to-css.js';
|
||||
import { sessionManagerTool } from './session-manager.js';
|
||||
import { cliExecutorTool } from './cli-executor.js';
|
||||
import { smartSearchTool } from './smart-search.js';
|
||||
|
||||
// Tool registry - add new tools here
|
||||
const tools = new Map();
|
||||
@@ -249,6 +251,7 @@ export function getAllToolSchemas() {
|
||||
|
||||
// Register built-in tools
|
||||
registerTool(editFileTool);
|
||||
registerTool(writeFileTool);
|
||||
registerTool(getModulesByDepthTool);
|
||||
registerTool(classifyFoldersTool);
|
||||
registerTool(detectChangedModulesTool);
|
||||
@@ -260,6 +263,7 @@ registerTool(updateModuleClaudeTool);
|
||||
registerTool(convertTokensToCssTool);
|
||||
registerTool(sessionManagerTool);
|
||||
registerTool(cliExecutorTool);
|
||||
registerTool(smartSearchTool);
|
||||
|
||||
// Export for external tool registration
|
||||
export { registerTool };
|
||||
|
||||
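As a usage note (not part of the diff), external tools can be added through the exported registerTool; the tool and import path below are hypothetical:

// Hypothetical external tool registration via the exported registerTool.
import { registerTool } from './ccw/src/tools/index.js'; // illustrative path

registerTool({
  name: 'hello_world',
  description: 'Example tool that echoes a greeting',
  parameters: {
    type: 'object',
    properties: { name: { type: 'string', description: 'Who to greet' } },
    required: ['name']
  },
  execute: async ({ name }) => ({ success: true, message: `Hello, ${name}!` })
});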
ccw/src/tools/smart-search.js (new file, 487 lines)
@@ -0,0 +1,487 @@
|
||||
/**
|
||||
* Smart Search Tool - Unified search with mode-based execution
|
||||
* Modes: auto, exact, fuzzy, semantic, graph
|
||||
*
|
||||
* Features:
|
||||
* - Intent classification (auto mode)
|
||||
* - Multi-backend search routing
|
||||
* - Result fusion with RRF ranking
|
||||
* - Configurable search parameters
|
||||
*/
|
||||
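The header lists result fusion with RRF ranking, but no fusion code appears in this commit; a minimal sketch of reciprocal rank fusion over per-backend result lists, purely as an assumption of how it could look:

// Sketch only (not in this commit): reciprocal rank fusion over ranked result lists.
// Results are keyed by file:line; k = 60 is the commonly used RRF constant.
function fuseWithRRF(resultLists, k = 60) {
  const scores = new Map();
  for (const list of resultLists) {
    list.forEach((item, rank) => {
      const key = `${item.file}:${item.line}`;
      const entry = scores.get(key) || { item, score: 0 };
      entry.score += 1 / (k + rank + 1);
      scores.set(key, entry);
    });
  }
  return [...scores.values()].sort((a, b) => b.score - a.score).map(e => e.item);
}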
|
||||
import { spawn, execSync } from 'child_process';
|
||||
import { existsSync, readdirSync, statSync } from 'fs';
|
||||
import { join, resolve, isAbsolute } from 'path';
|
||||
|
||||
// Search mode constants
|
||||
const SEARCH_MODES = ['auto', 'exact', 'fuzzy', 'semantic', 'graph'];
|
||||
|
||||
// Classification confidence threshold
|
||||
const CONFIDENCE_THRESHOLD = 0.7;
|
||||
|
||||
/**
|
||||
* Detection heuristics for intent classification
|
||||
*/
|
||||
|
||||
/**
|
||||
* Detect literal string query (simple alphanumeric or quoted strings)
|
||||
*/
|
||||
function detectLiteral(query) {
|
||||
return /^[a-zA-Z0-9_-]+$/.test(query) || /^["'].*["']$/.test(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect regex pattern (contains regex metacharacters)
|
||||
*/
|
||||
function detectRegex(query) {
|
||||
return /[.*+?^${}()|[\]\\]/.test(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect natural language query (sentence structure, questions, multi-word phrases)
|
||||
*/
|
||||
function detectNaturalLanguage(query) {
|
||||
return query.split(/\s+/).length >= 3 || /\?$/.test(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect file path query (path separators, file extensions)
|
||||
*/
|
||||
function detectFilePath(query) {
|
||||
return /[/\\]/.test(query) || /\.[a-z]{2,4}$/i.test(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect relationship query (import, export, dependency keywords)
|
||||
*/
|
||||
function detectRelationship(query) {
|
||||
return /(import|export|uses?|depends?|calls?|extends?)\s/i.test(query);
|
||||
}
|
||||
|
||||
/**
|
||||
* Classify query intent and recommend search mode
|
||||
* @param {string} query - Search query string
|
||||
* @returns {{mode: string, confidence: number, reasoning: string}}
|
||||
*/
|
||||
function classifyIntent(query) {
|
||||
// Initialize mode scores
|
||||
const scores = {
|
||||
exact: 0,
|
||||
fuzzy: 0,
|
||||
semantic: 0,
|
||||
graph: 0
|
||||
};
|
||||
|
||||
// Apply detection heuristics with weighted scoring
|
||||
if (detectLiteral(query)) {
|
||||
scores.exact += 0.8;
|
||||
}
|
||||
|
||||
if (detectRegex(query)) {
|
||||
scores.fuzzy += 0.7;
|
||||
}
|
||||
|
||||
if (detectNaturalLanguage(query)) {
|
||||
scores.semantic += 0.9;
|
||||
}
|
||||
|
||||
if (detectFilePath(query)) {
|
||||
scores.exact += 0.6;
|
||||
}
|
||||
|
||||
if (detectRelationship(query)) {
|
||||
scores.graph += 0.85;
|
||||
}
|
||||
|
||||
// Find mode with highest confidence score
|
||||
const mode = Object.keys(scores).reduce((a, b) => scores[a] > scores[b] ? a : b);
|
||||
const confidence = scores[mode];
|
||||
|
||||
// Build reasoning string
|
||||
const detectedPatterns = [];
|
||||
if (detectLiteral(query)) detectedPatterns.push('literal');
|
||||
if (detectRegex(query)) detectedPatterns.push('regex');
|
||||
if (detectNaturalLanguage(query)) detectedPatterns.push('natural language');
|
||||
if (detectFilePath(query)) detectedPatterns.push('file path');
|
||||
if (detectRelationship(query)) detectedPatterns.push('relationship');
|
||||
|
||||
const reasoning = `Query classified as ${mode} (confidence: ${confidence.toFixed(2)}, detected: ${detectedPatterns.join(', ')})`;
|
||||
|
||||
return { mode, confidence, reasoning };
|
||||
}
|
||||
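Two illustrative calls (not in the diff), with results following the weights above:

// classifyIntent('handleLogin')
// => { mode: 'exact', confidence: 0.8, reasoning: '... detected: literal' }
// classifyIntent('how does session recovery work?')
// => { mode: 'semantic', confidence: 0.9, reasoning: '... detected: regex, natural language' }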
|
||||
|
||||
/**
|
||||
* Check if a tool is available in PATH
|
||||
* @param {string} toolName - Tool executable name
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function checkToolAvailability(toolName) {
|
||||
try {
|
||||
const isWindows = process.platform === 'win32';
|
||||
const command = isWindows ? 'where' : 'which';
|
||||
execSync(`${command} ${toolName}`, { stdio: 'ignore' });
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Build ripgrep command arguments
|
||||
* @param {Object} params - Search parameters
|
||||
* @returns {{command: string, args: string[]}}
|
||||
*/
|
||||
function buildRipgrepCommand(params) {
|
||||
const { query, paths = ['.'], contextLines = 0, maxResults = 100, includeHidden = false } = params;
|
||||
|
||||
const args = [
|
||||
'-n', // Show line numbers
|
||||
'--color=never', // Disable color output
|
||||
'--json' // Output in JSON format
|
||||
];
|
||||
|
||||
// Add context lines if specified
|
||||
if (contextLines > 0) {
|
||||
args.push('-C', contextLines.toString());
|
||||
}
|
||||
|
||||
// Add max results limit
|
||||
if (maxResults > 0) {
|
||||
args.push('--max-count', maxResults.toString());
|
||||
}
|
||||
|
||||
// Include hidden files if specified
|
||||
if (includeHidden) {
|
||||
args.push('--hidden');
|
||||
}
|
||||
|
||||
// Use literal/fixed string matching for exact mode
|
||||
args.push('-F', query);
|
||||
|
||||
// Add search paths
|
||||
args.push(...paths);
|
||||
|
||||
return { command: 'rg', args };
|
||||
}
|
||||
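For reference, the arguments assembled for a typical call look like this (query and path are invented):

// buildRipgrepCommand({ query: 'TODO', paths: ['src'], contextLines: 2, maxResults: 50 })
// => { command: 'rg',
//      args: ['-n', '--color=never', '--json', '-C', '2', '--max-count', '50', '-F', 'TODO', 'src'] }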
|
||||
/**
|
||||
* Mode: auto - Intent classification and mode selection
|
||||
* Analyzes query to determine optimal search mode
|
||||
*/
|
||||
async function executeAutoMode(params) {
|
||||
const { query } = params;
|
||||
|
||||
// Classify intent
|
||||
const classification = classifyIntent(query);
|
||||
|
||||
// Route to appropriate mode based on classification
|
||||
switch (classification.mode) {
|
||||
case 'exact':
|
||||
// Execute exact mode and enrich result with classification metadata
|
||||
const exactResult = await executeExactMode(params);
|
||||
return {
|
||||
...exactResult,
|
||||
metadata: {
|
||||
...exactResult.metadata,
|
||||
classified_as: classification.mode,
|
||||
confidence: classification.confidence,
|
||||
reasoning: classification.reasoning
|
||||
}
|
||||
};
|
||||
|
||||
case 'fuzzy':
|
||||
case 'semantic':
|
||||
case 'graph':
|
||||
// These modes not yet implemented
|
||||
return {
|
||||
success: false,
|
||||
error: `${classification.mode} mode not yet implemented`,
|
||||
metadata: {
|
||||
classified_as: classification.mode,
|
||||
confidence: classification.confidence,
|
||||
reasoning: classification.reasoning
|
||||
}
|
||||
};
|
||||
|
||||
default:
|
||||
// Fallback to exact mode with warning
|
||||
const fallbackResult = await executeExactMode(params);
|
||||
return {
|
||||
...fallbackResult,
|
||||
metadata: {
|
||||
...fallbackResult.metadata,
|
||||
classified_as: 'exact',
|
||||
confidence: 0.5,
|
||||
reasoning: 'Fallback to exact mode due to unknown classification'
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Mode: exact - Precise file path and content matching
|
||||
* Uses ripgrep for literal string matching
|
||||
*/
|
||||
async function executeExactMode(params) {
|
||||
const { query, paths = [], contextLines = 0, maxResults = 100, includeHidden = false } = params;
|
||||
|
||||
// Check ripgrep availability
|
||||
if (!checkToolAvailability('rg')) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'ripgrep not available - please install ripgrep (rg) to use exact search mode'
|
||||
};
|
||||
}
|
||||
|
||||
// Build ripgrep command
|
||||
const { command, args } = buildRipgrepCommand({
|
||||
query,
|
||||
paths: paths.length > 0 ? paths : ['.'],
|
||||
contextLines,
|
||||
maxResults,
|
||||
includeHidden
|
||||
});
|
||||
|
||||
return new Promise((resolve) => {
|
||||
const child = spawn(command, args, {
|
||||
cwd: process.cwd(),
|
||||
stdio: ['ignore', 'pipe', 'pipe']
|
||||
});
|
||||
|
||||
let stdout = '';
|
||||
let stderr = '';
|
||||
|
||||
// Collect stdout
|
||||
child.stdout.on('data', (data) => {
|
||||
stdout += data.toString();
|
||||
});
|
||||
|
||||
// Collect stderr
|
||||
child.stderr.on('data', (data) => {
|
||||
stderr += data.toString();
|
||||
});
|
||||
|
||||
// Handle completion
|
||||
child.on('close', (code) => {
|
||||
// Parse ripgrep JSON output
|
||||
const results = [];
|
||||
|
||||
if (code === 0 || (code === 1 && stdout.trim())) {
|
||||
// Code 0: matches found, Code 1: no matches (but may have output)
|
||||
const lines = stdout.split('\n').filter(line => line.trim());
|
||||
|
||||
for (const line of lines) {
|
||||
try {
|
||||
const item = JSON.parse(line);
|
||||
|
||||
// Only process match type items
|
||||
if (item.type === 'match') {
|
||||
const match = {
|
||||
file: item.data.path.text,
|
||||
line: item.data.line_number,
|
||||
column: item.data.submatches && item.data.submatches[0] ? item.data.submatches[0].start + 1 : 1,
|
||||
content: item.data.lines.text.trim()
|
||||
};
|
||||
results.push(match);
|
||||
}
|
||||
} catch (err) {
|
||||
// Skip malformed JSON lines
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
resolve({
|
||||
success: true,
|
||||
results,
|
||||
metadata: {
|
||||
mode: 'exact',
|
||||
backend: 'ripgrep',
|
||||
count: results.length,
|
||||
query
|
||||
}
|
||||
});
|
||||
} else {
|
||||
// Error occurred
|
||||
resolve({
|
||||
success: false,
|
||||
error: `ripgrep execution failed with code ${code}: ${stderr}`,
|
||||
results: []
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Handle spawn errors
|
||||
child.on('error', (error) => {
|
||||
resolve({
|
||||
success: false,
|
||||
error: `Failed to spawn ripgrep: ${error.message}`,
|
||||
results: []
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
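An abridged example (invented file and content) of a ripgrep --json match event and what the parser above keeps from it:

// {"type":"match","data":{"path":{"text":"src/auth.js"},"line_number":42,
//   "lines":{"text":"  const token = sign(payload);\n"},
//   "submatches":[{"match":{"text":"token"},"start":8,"end":13}]}}
// is reduced to:
// { file: 'src/auth.js', line: 42, column: 9, content: 'const token = sign(payload);' }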
|
||||
/**
|
||||
* Mode: fuzzy - Approximate matching with tolerance
|
||||
* Uses fuzzy matching algorithms for typo-tolerant search
|
||||
*/
|
||||
async function executeFuzzyMode(params) {
|
||||
const { query, paths = [], maxResults = 100 } = params;
|
||||
|
||||
// TODO: Implement fuzzy search
|
||||
// - Use fuse.js for content fuzzy matching
|
||||
// - Support approximate file path matching
|
||||
// - Configure similarity threshold
|
||||
// - Return ranked results
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: 'Fuzzy mode not implemented - fuzzy matching engine pending'
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Mode: semantic - Natural language understanding search
|
||||
* Uses LLM or embeddings for semantic similarity
|
||||
*/
|
||||
async function executeSemanticMode(params) {
|
||||
const { query, paths = [], maxResults = 100 } = params;
|
||||
|
||||
// TODO: Implement semantic search
|
||||
// - Option 1: Use Gemini CLI via cli-executor.js
|
||||
// - Option 2: Use local embeddings (transformers.js)
|
||||
// - Generate query embedding
|
||||
// - Compare with code embeddings
|
||||
// - Return semantically similar results
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: 'Semantic mode not implemented - LLM/embedding integration pending'
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Mode: graph - Dependency and relationship traversal
|
||||
* Analyzes code relationships (imports, exports, dependencies)
|
||||
*/
|
||||
async function executeGraphMode(params) {
|
||||
const { query, paths = [], maxResults = 100 } = params;
|
||||
|
||||
// TODO: Implement graph search
|
||||
// - Parse import/export statements
|
||||
// - Build dependency graph
|
||||
// - Traverse relationships
|
||||
// - Find related modules
|
||||
// - Return graph results
|
||||
|
||||
return {
|
||||
success: false,
|
||||
error: 'Graph mode not implemented - dependency analysis pending'
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Main execute function - routes to appropriate mode handler
|
||||
*/
|
||||
async function execute(params) {
|
||||
const { query, mode = 'auto', paths = [], contextLines = 0, maxResults = 100, includeHidden = false } = params;
|
||||
|
||||
// Validate required parameters
|
||||
if (!query || typeof query !== 'string') {
|
||||
throw new Error('Parameter "query" is required and must be a string');
|
||||
}
|
||||
|
||||
// Validate mode
|
||||
if (!SEARCH_MODES.includes(mode)) {
|
||||
throw new Error(`Invalid mode: ${mode}. Valid modes: ${SEARCH_MODES.join(', ')}`);
|
||||
}
|
||||
|
||||
// Route to mode-specific handler
|
||||
switch (mode) {
|
||||
case 'auto':
|
||||
return executeAutoMode(params);
|
||||
|
||||
case 'exact':
|
||||
return executeExactMode(params);
|
||||
|
||||
case 'fuzzy':
|
||||
return executeFuzzyMode(params);
|
||||
|
||||
case 'semantic':
|
||||
return executeSemanticMode(params);
|
||||
|
||||
case 'graph':
|
||||
return executeGraphMode(params);
|
||||
|
||||
default:
|
||||
throw new Error(`Unsupported mode: ${mode}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Smart Search Tool Definition
|
||||
*/
|
||||
export const smartSearchTool = {
|
||||
name: 'smart_search',
|
||||
description: `Unified search with intelligent mode selection.
|
||||
|
||||
Modes:
|
||||
- auto: Classify intent and recommend optimal search mode (default)
|
||||
- exact: Precise literal matching via ripgrep
|
||||
- fuzzy: Approximate matching with typo tolerance
|
||||
- semantic: Natural language understanding via LLM/embeddings
|
||||
- graph: Dependency relationship traversal
|
||||
|
||||
Features:
|
||||
- Multi-backend search coordination
|
||||
- Result fusion with RRF ranking
|
||||
- Configurable result limits and context`,
|
||||
parameters: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
query: {
|
||||
type: 'string',
|
||||
description: 'Search query (file pattern, text content, or natural language)'
|
||||
},
|
||||
mode: {
|
||||
type: 'string',
|
||||
enum: SEARCH_MODES,
|
||||
description: 'Search mode (default: auto)',
|
||||
default: 'auto'
|
||||
},
|
||||
paths: {
|
||||
type: 'array',
|
||||
description: 'Paths to search within (default: current directory)',
|
||||
items: {
|
||||
type: 'string'
|
||||
},
|
||||
default: []
|
||||
},
|
||||
contextLines: {
|
||||
type: 'number',
|
||||
description: 'Number of context lines around matches (default: 0)',
|
||||
default: 0
|
||||
},
|
||||
maxResults: {
|
||||
type: 'number',
|
||||
description: 'Maximum number of results to return (default: 100)',
|
||||
default: 100
|
||||
},
|
||||
includeHidden: {
|
||||
type: 'boolean',
|
||||
description: 'Include hidden files/directories (default: false)',
|
||||
default: false
|
||||
}
|
||||
},
|
||||
required: ['query']
|
||||
},
|
||||
execute
|
||||
};
|
||||
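An illustrative smart_search invocation (query and path are invented):

const found = await smartSearchTool.execute({
  query: 'registerTool',
  mode: 'exact',
  paths: ['ccw/src'],
  contextLines: 1,
  maxResults: 20
});
// found.results is an array of { file, line, column, content } matches;
// found.metadata reports { mode: 'exact', backend: 'ripgrep', count, query }.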
ccw/src/tools/write-file.js (new file, 152 lines)
@@ -0,0 +1,152 @@
|
||||
/**
|
||||
* Write File Tool - Create or overwrite files
|
||||
*
|
||||
* Features:
|
||||
* - Create new files or overwrite existing
|
||||
* - Auto-create parent directories
|
||||
* - Support for text content with proper encoding
|
||||
* - Optional backup before overwrite
|
||||
*/
|
||||
|
||||
import { writeFileSync, readFileSync, existsSync, mkdirSync, renameSync } from 'fs';
|
||||
import { resolve, isAbsolute, dirname, basename } from 'path';
|
||||
|
||||
/**
|
||||
* Ensure parent directory exists
|
||||
* @param {string} filePath - Path to file
|
||||
*/
|
||||
function ensureDir(filePath) {
|
||||
const dir = dirname(filePath);
|
||||
if (!existsSync(dir)) {
|
||||
mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create backup of existing file
|
||||
* @param {string} filePath - Path to file
|
||||
* @returns {string|null} - Backup path or null if no backup created
|
||||
*/
|
||||
function createBackup(filePath) {
|
||||
if (!existsSync(filePath)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const dir = dirname(filePath);
|
||||
const name = basename(filePath);
|
||||
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
||||
const backupPath = resolve(dir, `.${name}.${timestamp}.bak`);
|
||||
|
||||
try {
|
||||
const content = readFileSync(filePath);
|
||||
writeFileSync(backupPath, content);
|
||||
return backupPath;
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to create backup: ${error.message}`);
|
||||
}
|
||||
}
|
||||
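For example, backing up config.json at 2026-02-11T02:33:51.000Z (timestamp invented) would write a sibling file named .config.json.2026-02-11T02-33-51-000Z.bak.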
|
||||
/**
|
||||
* Execute write file operation
|
||||
* @param {Object} params - Parameters
|
||||
* @returns {Promise<Object>} - Result
|
||||
*/
|
||||
async function execute(params) {
|
||||
const {
|
||||
path: filePath,
|
||||
content,
|
||||
createDirectories = true,
|
||||
backup = false,
|
||||
encoding = 'utf8'
|
||||
} = params;
|
||||
|
||||
if (!filePath) {
|
||||
throw new Error('Parameter "path" is required');
|
||||
}
|
||||
|
||||
if (content === undefined) {
|
||||
throw new Error('Parameter "content" is required');
|
||||
}
|
||||
|
||||
// Resolve path
|
||||
const resolvedPath = isAbsolute(filePath) ? filePath : resolve(process.cwd(), filePath);
|
||||
const fileExists = existsSync(resolvedPath);
|
||||
|
||||
// Create parent directories if needed
|
||||
if (createDirectories) {
|
||||
ensureDir(resolvedPath);
|
||||
} else if (!existsSync(dirname(resolvedPath))) {
|
||||
throw new Error(`Parent directory does not exist: ${dirname(resolvedPath)}`);
|
||||
}
|
||||
|
||||
// Create backup if requested and file exists
|
||||
let backupPath = null;
|
||||
if (backup && fileExists) {
|
||||
backupPath = createBackup(resolvedPath);
|
||||
}
|
||||
|
||||
// Write file
|
||||
try {
|
||||
writeFileSync(resolvedPath, content, { encoding });
|
||||
|
||||
return {
|
||||
success: true,
|
||||
path: resolvedPath,
|
||||
created: !fileExists,
|
||||
overwritten: fileExists,
|
||||
backupPath,
|
||||
bytes: Buffer.byteLength(content, encoding),
|
||||
message: fileExists
|
||||
? `Successfully overwrote ${filePath}${backupPath ? ` (backup: ${backupPath})` : ''}`
|
||||
: `Successfully created ${filePath}`
|
||||
};
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to write file: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Write File Tool Definition
|
||||
*/
|
||||
export const writeFileTool = {
|
||||
name: 'write_file',
|
||||
description: `Create a new file or overwrite an existing file with content.
|
||||
|
||||
Features:
|
||||
- Creates parent directories automatically (configurable)
|
||||
- Optional backup before overwrite
|
||||
- Supports text content with proper encoding
|
||||
|
||||
Use with caution as it will overwrite existing files without warning unless backup is enabled.`,
|
||||
parameters: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
path: {
|
||||
type: 'string',
|
||||
description: 'Path to the file to create or overwrite'
|
||||
},
|
||||
content: {
|
||||
type: 'string',
|
||||
description: 'Content to write to the file'
|
||||
},
|
||||
createDirectories: {
|
||||
type: 'boolean',
|
||||
description: 'Create parent directories if they do not exist (default: true)',
|
||||
default: true
|
||||
},
|
||||
backup: {
|
||||
type: 'boolean',
|
||||
description: 'Create backup of existing file before overwriting (default: false)',
|
||||
default: false
|
||||
},
|
||||
encoding: {
|
||||
type: 'string',
|
||||
description: 'File encoding (default: utf8)',
|
||||
default: 'utf8',
|
||||
enum: ['utf8', 'utf-8', 'ascii', 'latin1', 'binary', 'hex', 'base64']
|
||||
}
|
||||
},
|
||||
required: ['path', 'content']
|
||||
},
|
||||
execute
|
||||
};
|
||||
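An illustrative write_file invocation (path and content are invented):

const written = await writeFileTool.execute({
  path: 'config/settings.json',
  content: JSON.stringify({ theme: 'dark' }, null, 2),
  backup: true
});
// If the file already existed: { success: true, created: false, overwritten: true,
//   backupPath: <resolved path to .settings.json.<timestamp>.bak>, bytes: ..., message: ... }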