Mirror of https://github.com/catlog22/Claude-Code-Workflow.git, synced 2026-02-12 02:37:45 +08:00
feat: Add context_cache MCP tool with simplified CLI options
Add context_cache MCP tool for caching files by @patterns:
- pattern-parser.ts: Parse @expressions using glob
- context-cache-store.ts: In-memory cache with TTL/LRU
- context-cache.ts: MCP tool with pack/read/status/release/cleanup

Simplify CLI cache options:
- --cache now uses comma-separated format instead of JSON
- Items starting with @ are patterns, others are text content
- Add --inject-mode option (none/full/progressive)
- Default: codex=full, gemini/qwen=none

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
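The --cache classification rule described above is: split on commas, treat items starting with @ as glob patterns, and everything else as literal text. A minimal sketch of that rule, using a hypothetical splitCacheItems helper that is not part of this commit (it mirrors the parsing the commit adds to execAction below):

  // Sketch only: classify --cache items per the rule above.
  function splitCacheItems(raw: string): { patterns: string[]; content: string[] } {
    const items = raw.split(',').map(s => s.trim()).filter(Boolean);
    return {
      patterns: items.filter(i => i.startsWith('@')),  // e.g. "@src/**/*.ts"
      content: items.filter(i => !i.startsWith('@')),  // literal text to cache
    };
  }
  // splitCacheItems('@src/**/*.ts,@CLAUDE.md,extra note')
  // => { patterns: ['@src/**/*.ts', '@CLAUDE.md'], content: ['extra note'] }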
@@ -181,12 +181,14 @@ export function run(argv: string[]): void {
     .option('--resume [id]', 'Resume previous session (empty=last, or execution ID, or comma-separated IDs for merge)')
     .option('--id <id>', 'Custom execution ID (e.g., IMPL-001-step1)')
     .option('--no-native', 'Force prompt concatenation instead of native resume')
+    .option('--cache [items]', 'Cache: comma-separated @patterns and text content')
+    .option('--inject-mode <mode>', 'Inject mode: none, full, progressive (default: codex=full, others=none)')
     // Storage options
     .option('--project <path>', 'Project path for storage operations')
     .option('--force', 'Confirm destructive operations')
     .option('--cli-history', 'Target CLI history storage')
     .option('--memory', 'Target memory storage')
-    .option('--cache', 'Target cache storage')
+    .option('--storage-cache', 'Target cache storage')
     .option('--config', 'Target config storage')
     .action((subcommand, args, options) => cliCommand(subcommand, args, options));
 
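Note the storage-target flag is renamed from --cache to --storage-cache here, presumably because --cache is now taken by the new exec-time caching option registered just above; with commander-style option registration the two flags would otherwise collide.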
@@ -78,6 +78,14 @@ interface CliExecOptions {
   resume?: string | boolean; // true = last, string = execution ID, comma-separated for merge
   id?: string; // Custom execution ID (e.g., IMPL-001-step1)
   noNative?: boolean; // Force prompt concatenation instead of native resume
+  cache?: string | boolean; // Cache: true = auto from CONTEXT, string = comma-separated patterns/content
+  injectMode?: 'none' | 'full' | 'progressive'; // Inject mode for cached content
+}
+
+/** Cache configuration parsed from --cache */
+interface CacheConfig {
+  patterns?: string[]; // @patterns to pack (items starting with @)
+  content?: string; // Additional text content (items not starting with @)
 }
 
 interface HistoryOptions {
@@ -91,7 +99,7 @@ interface StorageOptions {
   project?: string;
   cliHistory?: boolean;
   memory?: boolean;
-  cache?: boolean;
+  storageCache?: boolean;
   config?: boolean;
   force?: boolean;
 }
@@ -173,15 +181,15 @@ async function showStorageInfo(): Promise<void> {
  * Clean storage
  */
 async function cleanStorage(options: StorageOptions): Promise<void> {
-  const { all, project, force, cliHistory, memory, cache, config } = options;
+  const { all, project, force, cliHistory, memory, storageCache, config } = options;
 
   // Determine what to clean
   const cleanTypes = {
-    cliHistory: cliHistory || (!cliHistory && !memory && !cache && !config),
-    memory: memory || (!cliHistory && !memory && !cache && !config),
-    cache: cache || (!cliHistory && !memory && !cache && !config),
+    cliHistory: cliHistory || (!cliHistory && !memory && !storageCache && !config),
+    memory: memory || (!cliHistory && !memory && !storageCache && !config),
+    cache: storageCache || (!cliHistory && !memory && !storageCache && !config),
     config: config || false, // Config requires explicit flag
-    all: !cliHistory && !memory && !cache && !config
+    all: !cliHistory && !memory && !storageCache && !config
   };
 
   if (project) {
@@ -383,7 +391,7 @@ async function statusAction(): Promise<void> {
  * @param {Object} options - CLI options
  */
 async function execAction(positionalPrompt: string | undefined, options: CliExecOptions): Promise<void> {
-  const { prompt: optionPrompt, file, tool = 'gemini', mode = 'analysis', model, cd, includeDirs, timeout, noStream, resume, id, noNative } = options;
+  const { prompt: optionPrompt, file, tool = 'gemini', mode = 'analysis', model, cd, includeDirs, timeout, noStream, resume, id, noNative, cache, injectMode } = options;
 
   // Priority: 1. --file, 2. --prompt/-p option, 3. positional argument
   let finalPrompt: string | undefined;
@@ -421,6 +429,128 @@ async function execAction(positionalPrompt: string | undefined, options: CliExecOptions): Promise<void> {
 
   const prompt_to_use = finalPrompt || '';
 
+  // Handle cache option: pack @patterns and/or content
+  let cacheSessionId: string | undefined;
+  let actualPrompt = prompt_to_use;
+
+  if (cache) {
+    const { handler: contextCacheHandler } = await import('../tools/context-cache.js');
+
+    // Parse cache config from comma-separated string
+    // Items starting with @ are patterns, others are text content
+    let cacheConfig: CacheConfig = {};
+
+    if (cache === true) {
+      // --cache without value: auto-extract from CONTEXT field
+      const contextMatch = prompt_to_use.match(/CONTEXT:\s*([^\n]+)/i);
+      if (contextMatch) {
+        const contextLine = contextMatch[1];
+        const patternMatches = contextLine.matchAll(/@[^\s|]+/g);
+        cacheConfig.patterns = Array.from(patternMatches).map(m => m[0]);
+      }
+    } else if (typeof cache === 'string') {
+      // Parse comma-separated items: @patterns and text content
+      const items = cache.split(',').map(s => s.trim()).filter(Boolean);
+      const patterns: string[] = [];
+      const contentParts: string[] = [];
+
+      for (const item of items) {
+        if (item.startsWith('@')) {
+          patterns.push(item);
+        } else {
+          contentParts.push(item);
+        }
+      }
+
+      if (patterns.length > 0) {
+        cacheConfig.patterns = patterns;
+      }
+      if (contentParts.length > 0) {
+        cacheConfig.content = contentParts.join('\n');
+      }
+    }
+
+    // Also extract patterns from CONTEXT if not provided
+    if ((!cacheConfig.patterns || cacheConfig.patterns.length === 0) && prompt_to_use) {
+      const contextMatch = prompt_to_use.match(/CONTEXT:\s*([^\n]+)/i);
+      if (contextMatch) {
+        const contextLine = contextMatch[1];
+        const patternMatches = contextLine.matchAll(/@[^\s|]+/g);
+        cacheConfig.patterns = Array.from(patternMatches).map(m => m[0]);
+      }
+    }
+
+    // Pack if we have patterns or content
+    if ((cacheConfig.patterns && cacheConfig.patterns.length > 0) || cacheConfig.content) {
+      const patternCount = cacheConfig.patterns?.length || 0;
+      const hasContent = !!cacheConfig.content;
+      console.log(chalk.gray(` Caching: ${patternCount} pattern(s)${hasContent ? ' + text content' : ''}...`));
+
+      const cacheResult = await contextCacheHandler({
+        operation: 'pack',
+        patterns: cacheConfig.patterns,
+        content: cacheConfig.content,
+        cwd: cd || process.cwd(),
+        include_dirs: includeDirs ? includeDirs.split(',') : undefined,
+      });
+
+      if (cacheResult.success && cacheResult.result) {
+        const packResult = cacheResult.result as { session_id: string; files_packed: number; total_bytes: number };
+        cacheSessionId = packResult.session_id;
+        console.log(chalk.gray(` Cached: ${packResult.files_packed} files, ${packResult.total_bytes} bytes`));
+        console.log(chalk.gray(` Session: ${cacheSessionId}`));
+
+        // Determine inject mode:
+        // --inject-mode explicitly set > tool default (codex=full, others=none)
+        const effectiveInjectMode = injectMode ?? (tool === 'codex' ? 'full' : 'none');
+
+        if (effectiveInjectMode !== 'none' && cacheSessionId) {
+          if (effectiveInjectMode === 'full') {
+            // Read full cache content
+            const readResult = await contextCacheHandler({
+              operation: 'read',
+              session_id: cacheSessionId,
+              offset: 0,
+              limit: 1024 * 1024, // 1MB max
+            });
+
+            if (readResult.success && readResult.result) {
+              const { content: cachedContent, total_bytes } = readResult.result as { content: string; total_bytes: number };
+              console.log(chalk.gray(` Injecting ${total_bytes} bytes (full mode)...`));
+              actualPrompt = `=== CACHED CONTEXT (${packResult.files_packed} files) ===\n${cachedContent}\n\n=== USER PROMPT ===\n${prompt_to_use}`;
+            }
+          } else if (effectiveInjectMode === 'progressive') {
+            // Progressive mode: read first page only (64KB default)
+            const pageLimit = 65536;
+            const readResult = await contextCacheHandler({
+              operation: 'read',
+              session_id: cacheSessionId,
+              offset: 0,
+              limit: pageLimit,
+            });
+
+            if (readResult.success && readResult.result) {
+              const { content: cachedContent, total_bytes, has_more, next_offset } = readResult.result as {
+                content: string; total_bytes: number; has_more: boolean; next_offset: number | null
+              };
+              console.log(chalk.gray(` Injecting ${cachedContent.length}/${total_bytes} bytes (progressive mode)...`));
+
+              const moreInfo = has_more
+                ? `\n[... ${total_bytes - cachedContent.length} more bytes available via: context_cache(operation="read", session_id="${cacheSessionId}", offset=${next_offset}) ...]`
+                : '';
+
+              actualPrompt = `=== CACHED CONTEXT (${packResult.files_packed} files, progressive) ===\n${cachedContent}${moreInfo}\n\n=== USER PROMPT ===\n${prompt_to_use}`;
+            }
+          }
+        }
+
+        console.log();
+      } else {
+        console.log(chalk.yellow(` Cache warning: ${cacheResult.error}`));
+      }
+    }
+  }
+
   // Parse resume IDs for merge scenario
   const resumeIds = resume && typeof resume === 'string' ? resume.split(',').map(s => s.trim()).filter(Boolean) : [];
   const isMerge = resumeIds.length > 1;
@@ -462,7 +592,7 @@ async function execAction(positionalPrompt: string | undefined, options: CliExecOptions): Promise<void> {
   try {
     const result = await cliExecutorTool.execute({
       tool,
-      prompt: prompt_to_use,
+      prompt: actualPrompt,
       mode,
       model,
       cd,
@@ -727,12 +857,26 @@ export async function cliCommand(
   console.log(chalk.gray(' --includeDirs <dirs> Additional directories'));
   console.log(chalk.gray(' --timeout <ms> Timeout (default: 300000)'));
   console.log(chalk.gray(' --resume [id] Resume previous session'));
+  console.log(chalk.gray(' --cache <items> Cache: comma-separated @patterns and text'));
+  console.log(chalk.gray(' --inject-mode <m> Inject mode: none, full, progressive'));
+  console.log();
+  console.log(' Cache format:');
+  console.log(chalk.gray(' --cache "@src/**/*.ts,@CLAUDE.md" # @patterns to pack'));
+  console.log(chalk.gray(' --cache "@src/**/*,extra context" # patterns + text content'));
+  console.log(chalk.gray(' --cache # auto from CONTEXT field'));
+  console.log();
+  console.log(' Inject modes:');
+  console.log(chalk.gray(' none: cache only, no injection (default for gemini/qwen)'));
+  console.log(chalk.gray(' full: inject all cached content (default for codex)'));
+  console.log(chalk.gray(' progressive: inject first 64KB with MCP continuation hint'));
   console.log();
   console.log(' Examples:');
   console.log(chalk.gray(' ccw cli -p "Analyze auth module" --tool gemini'));
   console.log(chalk.gray(' ccw cli -f prompt.txt --tool codex --mode write'));
   console.log(chalk.gray(' ccw cli -p "$(cat template.md)" --tool gemini'));
   console.log(chalk.gray(' ccw cli --resume --tool gemini'));
+  console.log(chalk.gray(' ccw cli -p "..." --cache "@src/**/*.ts" --tool codex'));
+  console.log(chalk.gray(' ccw cli -p "..." --cache "@src/**/*" --inject-mode progressive --tool gemini'));
   console.log();
  }
 }
@@ -22,7 +22,7 @@ const ENV_PROJECT_ROOT = 'CCW_PROJECT_ROOT';
 const ENV_ALLOWED_DIRS = 'CCW_ALLOWED_DIRS';
 
 // Default enabled tools (core set)
-const DEFAULT_TOOLS: string[] = ['write_file', 'edit_file', 'read_file', 'smart_search', 'core_memory'];
+const DEFAULT_TOOLS: string[] = ['write_file', 'edit_file', 'read_file', 'smart_search', 'core_memory', 'context_cache'];
 
 /**
  * Get list of enabled tools from environment or defaults
ccw/src/tools/context-cache-store.ts (new file, 368 lines)
@@ -0,0 +1,368 @@
/**
 * Context Cache Store - In-memory cache with TTL and LRU eviction
 * Stores packed file contents with session-based lifecycle management
 */

/** Cache entry metadata */
export interface CacheMetadata {
  files: string[];     // Source file paths
  patterns: string[];  // Original @patterns
  total_bytes: number; // Total content bytes
  file_count: number;  // Number of files packed
}

/** Cache entry structure */
export interface CacheEntry {
  session_id: string;
  created_at: number;  // Timestamp ms
  accessed_at: number; // Last access timestamp
  ttl: number;         // TTL in ms
  content: string;     // Packed file content
  metadata: CacheMetadata;
}

/** Paginated read result */
export interface PagedReadResult {
  content: string;            // Current page content
  offset: number;             // Current byte offset
  limit: number;              // Requested bytes
  total_bytes: number;        // Total content bytes
  has_more: boolean;          // Has more content
  next_offset: number | null; // Next page offset (null if no more)
}

/** Cache status info */
export interface CacheStatus {
  entries: number;     // Total cache entries
  total_bytes: number; // Total bytes cached
  oldest_session: string | null;
  newest_session: string | null;
}

/** Session status info */
export interface SessionStatus {
  session_id: string;
  exists: boolean;
  files?: string[];
  file_count?: number;
  total_bytes?: number;
  created_at?: string;
  expires_at?: string;
  accessed_at?: string;
  ttl_remaining_ms?: number;
}

/** Default configuration */
const DEFAULT_MAX_ENTRIES = 100;
const DEFAULT_TTL_MS = 30 * 60 * 1000; // 30 minutes
const DEFAULT_PAGE_SIZE = 65536; // 64KB

/**
 * Context Cache Store singleton
 * Manages in-memory cache with TTL expiration and LRU eviction
 */
class ContextCacheStore {
  private cache: Map<string, CacheEntry> = new Map();
  private maxEntries: number;
  private defaultTTL: number;
  private cleanupInterval: NodeJS.Timeout | null = null;

  constructor(options: {
    maxEntries?: number;
    defaultTTL?: number;
    cleanupIntervalMs?: number;
  } = {}) {
    this.maxEntries = options.maxEntries ?? DEFAULT_MAX_ENTRIES;
    this.defaultTTL = options.defaultTTL ?? DEFAULT_TTL_MS;

    // Start periodic cleanup
    const cleanupMs = options.cleanupIntervalMs ?? 60000; // 1 minute
    this.cleanupInterval = setInterval(() => {
      this.cleanupExpired();
    }, cleanupMs);

    // Allow cleanup to not keep process alive
    if (this.cleanupInterval.unref) {
      this.cleanupInterval.unref();
    }
  }

  /**
   * Store packed content in cache
   */
  set(
    sessionId: string,
    content: string,
    metadata: CacheMetadata,
    ttl?: number
  ): CacheEntry {
    const now = Date.now();
    const entryTTL = ttl ?? this.defaultTTL;

    // Evict if at capacity
    if (this.cache.size >= this.maxEntries && !this.cache.has(sessionId)) {
      this.evictOldest();
    }

    const entry: CacheEntry = {
      session_id: sessionId,
      created_at: now,
      accessed_at: now,
      ttl: entryTTL,
      content,
      metadata,
    };

    this.cache.set(sessionId, entry);
    return entry;
  }

  /**
   * Get cache entry by session ID
   */
  get(sessionId: string): CacheEntry | null {
    const entry = this.cache.get(sessionId);

    if (!entry) {
      return null;
    }

    // Check TTL expiration
    if (this.isExpired(entry)) {
      this.cache.delete(sessionId);
      return null;
    }

    // Update access time (LRU)
    entry.accessed_at = Date.now();
    return entry;
  }

  /**
   * Read content with pagination
   */
  read(
    sessionId: string,
    offset: number = 0,
    limit: number = DEFAULT_PAGE_SIZE
  ): PagedReadResult | null {
    const entry = this.get(sessionId);

    if (!entry) {
      return null;
    }

    const content = entry.content;
    const totalBytes = Buffer.byteLength(content, 'utf-8');

    // Handle byte-based offset for UTF-8
    // For simplicity, we use character-based slicing
    // This is approximate but works for most use cases
    const charOffset = Math.min(offset, content.length);
    const charLimit = Math.min(limit, content.length - charOffset);

    const pageContent = content.slice(charOffset, charOffset + charLimit);
    const endOffset = charOffset + pageContent.length;
    const hasMore = endOffset < content.length;

    return {
      content: pageContent,
      offset: charOffset,
      limit: charLimit,
      total_bytes: totalBytes,
      has_more: hasMore,
      next_offset: hasMore ? endOffset : null,
    };
  }

  /**
   * Release (delete) cache entry
   */
  release(sessionId: string): { released: boolean; freed_bytes: number } {
    const entry = this.cache.get(sessionId);

    if (!entry) {
      return { released: false, freed_bytes: 0 };
    }

    const freedBytes = entry.metadata.total_bytes;
    this.cache.delete(sessionId);

    return { released: true, freed_bytes: freedBytes };
  }

  /**
   * Get session status
   */
  getSessionStatus(sessionId: string): SessionStatus {
    const entry = this.cache.get(sessionId);

    if (!entry) {
      return { session_id: sessionId, exists: false };
    }

    // Check if expired
    if (this.isExpired(entry)) {
      this.cache.delete(sessionId);
      return { session_id: sessionId, exists: false };
    }

    const now = Date.now();
    const expiresAt = entry.created_at + entry.ttl;
    const ttlRemaining = Math.max(0, expiresAt - now);

    return {
      session_id: sessionId,
      exists: true,
      files: entry.metadata.files,
      file_count: entry.metadata.file_count,
      total_bytes: entry.metadata.total_bytes,
      created_at: new Date(entry.created_at).toISOString(),
      expires_at: new Date(expiresAt).toISOString(),
      accessed_at: new Date(entry.accessed_at).toISOString(),
      ttl_remaining_ms: ttlRemaining,
    };
  }

  /**
   * Get overall cache status
   */
  getStatus(): CacheStatus {
    let totalBytes = 0;
    let oldest: CacheEntry | null = null;
    let newest: CacheEntry | null = null;

    for (const entry of this.cache.values()) {
      // Skip expired entries
      if (this.isExpired(entry)) {
        continue;
      }

      totalBytes += entry.metadata.total_bytes;

      if (!oldest || entry.created_at < oldest.created_at) {
        oldest = entry;
      }
      if (!newest || entry.created_at > newest.created_at) {
        newest = entry;
      }
    }

    return {
      entries: this.cache.size,
      total_bytes: totalBytes,
      oldest_session: oldest?.session_id ?? null,
      newest_session: newest?.session_id ?? null,
    };
  }

  /**
   * Cleanup expired entries
   */
  cleanupExpired(): { removed: number } {
    let removed = 0;
    const now = Date.now();

    for (const [sessionId, entry] of this.cache.entries()) {
      if (this.isExpired(entry, now)) {
        this.cache.delete(sessionId);
        removed++;
      }
    }

    return { removed };
  }

  /**
   * Clear all cache entries
   */
  clear(): { removed: number } {
    const count = this.cache.size;
    this.cache.clear();
    return { removed: count };
  }

  /**
   * Check if entry is expired
   */
  private isExpired(entry: CacheEntry, now?: number): boolean {
    const currentTime = now ?? Date.now();
    return currentTime > entry.created_at + entry.ttl;
  }

  /**
   * Evict oldest entry (LRU)
   */
  private evictOldest(): void {
    let oldest: [string, CacheEntry] | null = null;

    for (const [sessionId, entry] of this.cache.entries()) {
      if (!oldest || entry.accessed_at < oldest[1].accessed_at) {
        oldest = [sessionId, entry];
      }
    }

    if (oldest) {
      this.cache.delete(oldest[0]);
    }
  }

  /**
   * Stop cleanup timer (for graceful shutdown)
   */
  destroy(): void {
    if (this.cleanupInterval) {
      clearInterval(this.cleanupInterval);
      this.cleanupInterval = null;
    }
  }

  /**
   * List all session IDs
   */
  listSessions(): string[] {
    return Array.from(this.cache.keys());
  }

  /**
   * Check if session exists and is valid
   */
  has(sessionId: string): boolean {
    const entry = this.cache.get(sessionId);
    if (!entry) return false;
    if (this.isExpired(entry)) {
      this.cache.delete(sessionId);
      return false;
    }
    return true;
  }
}

// Singleton instance
let cacheInstance: ContextCacheStore | null = null;

/**
 * Get the singleton cache instance
 */
export function getContextCacheStore(options?: {
  maxEntries?: number;
  defaultTTL?: number;
  cleanupIntervalMs?: number;
}): ContextCacheStore {
  if (!cacheInstance) {
    cacheInstance = new ContextCacheStore(options);
  }
  return cacheInstance;
}

/**
 * Reset the cache instance (for testing)
 */
export function resetContextCacheStore(): void {
  if (cacheInstance) {
    cacheInstance.destroy();
    cacheInstance = null;
  }
}

export { ContextCacheStore };
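A minimal usage sketch of the store above (the session ID and contents are illustrative, not from the commit):

  import { getContextCacheStore } from './context-cache-store.js';

  const store = getContextCacheStore();
  store.set('ctx-demo', 'packed file contents...', {
    files: ['/repo/src/a.ts'],
    patterns: ['src/**/*.ts'],
    total_bytes: 23,
    file_count: 1,
  });

  // Page through the content 10 characters at a time.
  let page = store.read('ctx-demo', 0, 10);
  while (page && page.has_more) {
    page = store.read('ctx-demo', page.next_offset!, 10);
  }
  store.release('ctx-demo'); // frees the entry and reports freed bytes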
ccw/src/tools/context-cache.ts (new file, 393 lines)
@@ -0,0 +1,393 @@
/**
 * Context Cache MCP Tool
 * Pack files by @patterns, cache in memory, paginated read by session ID
 *
 * Operations:
 * - pack: Parse @patterns and cache file contents
 * - read: Paginated read from cache by session ID
 * - status: Get cache/session status
 * - release: Release session cache
 * - cleanup: Cleanup expired caches
 */

import { z } from 'zod';
import type { ToolSchema, ToolResult } from '../types/tool.js';
import { parseAndPack } from './pattern-parser.js';
import {
  getContextCacheStore,
  type CacheMetadata,
  type PagedReadResult,
  type CacheStatus,
  type SessionStatus,
} from './context-cache-store.js';

// Zod schema for parameter validation
const OperationEnum = z.enum(['pack', 'read', 'status', 'release', 'cleanup']);

const ParamsSchema = z.object({
  operation: OperationEnum,
  // Pack parameters
  patterns: z.array(z.string()).optional(),
  content: z.string().optional(), // Direct text content to cache
  session_id: z.string().optional(),
  cwd: z.string().optional(),
  include_dirs: z.array(z.string()).optional(),
  ttl: z.number().optional(),
  include_metadata: z.boolean().optional().default(true),
  max_file_size: z.number().optional(),
  // Read parameters
  offset: z.number().optional().default(0),
  limit: z.number().optional().default(65536), // 64KB default
});

type Params = z.infer<typeof ParamsSchema>;

// Result types
interface PackResult {
  operation: 'pack';
  session_id: string;
  files_packed: number;
  files_skipped: number;
  total_bytes: number;
  patterns_matched: number;
  patterns_failed: number;
  expires_at: string;
  errors?: string[];
}

interface ReadResult {
  operation: 'read';
  session_id: string;
  content: string;
  offset: number;
  limit: number;
  total_bytes: number;
  has_more: boolean;
  next_offset: number | null;
}

interface StatusResult {
  operation: 'status';
  session_id?: string;
  session?: SessionStatus;
  cache?: CacheStatus;
}

interface ReleaseResult {
  operation: 'release';
  session_id: string;
  released: boolean;
  freed_bytes: number;
}

interface CleanupResult {
  operation: 'cleanup';
  removed: number;
  remaining: number;
}

type OperationResult = PackResult | ReadResult | StatusResult | ReleaseResult | CleanupResult;

/**
 * Generate session ID if not provided
 */
function generateSessionId(): string {
  return `ctx-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
}

/**
 * Operation: pack
 * Parse @patterns and/or cache text content directly
 */
async function executePack(params: Params): Promise<PackResult> {
  const {
    patterns,
    content,
    session_id,
    cwd,
    include_dirs,
    ttl,
    include_metadata,
    max_file_size,
  } = params;

  // Require at least patterns or content
  if ((!patterns || patterns.length === 0) && !content) {
    throw new Error('Either "patterns" or "content" is required for pack operation');
  }

  const sessionId = session_id || generateSessionId();
  const store = getContextCacheStore();

  let finalContent = '';
  let filesPacked = 0;
  let filesSkipped = 0;
  let totalBytes = 0;
  let patternsMatched = 0;
  let patternsFailed = 0;
  let errors: string[] = [];
  let files: string[] = [];
  let parsedPatterns: string[] = [];

  // Pack files from patterns if provided
  if (patterns && patterns.length > 0) {
    const result = await parseAndPack(patterns, {
      cwd: cwd || process.cwd(),
      includeDirs: include_dirs,
      includeMetadata: include_metadata,
      maxFileSize: max_file_size,
    });

    finalContent = result.content;
    filesPacked = result.packedFiles.length;
    filesSkipped = result.skippedFiles.length;
    totalBytes = result.totalBytes;
    patternsMatched = result.parseResult.stats.matched_patterns;
    patternsFailed = result.parseResult.stats.total_patterns - patternsMatched;
    errors = result.parseResult.errors;
    files = result.packedFiles;
    parsedPatterns = result.parseResult.patterns;
  }

  // Append direct content if provided
  if (content) {
    if (finalContent) {
      finalContent += '\n\n=== ADDITIONAL CONTENT ===\n' + content;
    } else {
      finalContent = content;
    }
    totalBytes += Buffer.byteLength(content, 'utf-8');
  }

  // Store in cache
  const metadata: CacheMetadata = {
    files,
    patterns: parsedPatterns,
    total_bytes: totalBytes,
    file_count: filesPacked,
  };

  const entry = store.set(sessionId, finalContent, metadata, ttl);
  const expiresAt = new Date(entry.created_at + entry.ttl).toISOString();

  return {
    operation: 'pack',
    session_id: sessionId,
    files_packed: filesPacked,
    files_skipped: filesSkipped,
    total_bytes: totalBytes,
    patterns_matched: patternsMatched,
    patterns_failed: patternsFailed,
    expires_at: expiresAt,
    errors: errors.length > 0 ? errors : undefined,
  };
}

/**
 * Operation: read
 * Paginated read from cache
 */
function executeRead(params: Params): ReadResult {
  const { session_id, offset, limit } = params;

  if (!session_id) {
    throw new Error('Parameter "session_id" is required for read operation');
  }

  const store = getContextCacheStore();
  const result = store.read(session_id, offset, limit);

  if (!result) {
    throw new Error(`Session "${session_id}" not found or expired`);
  }

  return {
    operation: 'read',
    session_id,
    content: result.content,
    offset: result.offset,
    limit: result.limit,
    total_bytes: result.total_bytes,
    has_more: result.has_more,
    next_offset: result.next_offset,
  };
}

/**
 * Operation: status
 * Get session or overall cache status
 */
function executeStatus(params: Params): StatusResult {
  const { session_id } = params;
  const store = getContextCacheStore();

  if (session_id) {
    // Session-specific status
    const sessionStatus = store.getSessionStatus(session_id);
    return {
      operation: 'status',
      session_id,
      session: sessionStatus,
    };
  }

  // Overall cache status
  const cacheStatus = store.getStatus();
  return {
    operation: 'status',
    cache: cacheStatus,
  };
}

/**
 * Operation: release
 * Release session cache
 */
function executeRelease(params: Params): ReleaseResult {
  const { session_id } = params;

  if (!session_id) {
    throw new Error('Parameter "session_id" is required for release operation');
  }

  const store = getContextCacheStore();
  const result = store.release(session_id);

  return {
    operation: 'release',
    session_id,
    released: result.released,
    freed_bytes: result.freed_bytes,
  };
}

/**
 * Operation: cleanup
 * Cleanup expired caches
 */
function executeCleanup(): CleanupResult {
  const store = getContextCacheStore();
  const result = store.cleanupExpired();
  const status = store.getStatus();

  return {
    operation: 'cleanup',
    removed: result.removed,
    remaining: status.entries,
  };
}

/**
 * Route to operation handler
 */
async function execute(params: Params): Promise<OperationResult> {
  const { operation } = params;

  switch (operation) {
    case 'pack':
      return executePack(params);
    case 'read':
      return executeRead(params);
    case 'status':
      return executeStatus(params);
    case 'release':
      return executeRelease(params);
    case 'cleanup':
      return executeCleanup();
    default:
      throw new Error(
        `Unknown operation: ${operation}. Valid operations: pack, read, status, release, cleanup`
      );
  }
}

// MCP Tool Schema
export const schema: ToolSchema = {
  name: 'context_cache',
  description: `Context file cache with @pattern and text content support, paginated reading.

Usage:
  context_cache(operation="pack", patterns=["@src/**/*.ts"], session_id="...")
  context_cache(operation="pack", content="text to cache", session_id="...")
  context_cache(operation="pack", patterns=["@src/**/*.ts"], content="extra text")
  context_cache(operation="read", session_id="...", offset=0, limit=65536)
  context_cache(operation="status", session_id="...")
  context_cache(operation="release", session_id="...")
  context_cache(operation="cleanup")

Pattern syntax:
  @src/**/*.ts - All TypeScript files in src
  @CLAUDE.md - Specific file
  @../shared/**/* - Sibling directory (needs include_dirs)`,
  inputSchema: {
    type: 'object',
    properties: {
      operation: {
        type: 'string',
        enum: ['pack', 'read', 'status', 'release', 'cleanup'],
        description: 'Operation to perform',
      },
      patterns: {
        type: 'array',
        items: { type: 'string' },
        description: '@patterns to pack (e.g., ["@src/**/*.ts"]). Either patterns or content required for pack.',
      },
      content: {
        type: 'string',
        description: 'Direct text content to cache. Either patterns or content required for pack.',
      },
      session_id: {
        type: 'string',
        description: 'Cache session ID. Auto-generated for pack if not provided.',
      },
      cwd: {
        type: 'string',
        description: 'Working directory for pattern resolution (default: process.cwd())',
      },
      include_dirs: {
        type: 'array',
        items: { type: 'string' },
        description: 'Additional directories to include for pattern matching',
      },
      ttl: {
        type: 'number',
        description: 'Cache TTL in milliseconds (default: 1800000 = 30min)',
      },
      include_metadata: {
        type: 'boolean',
        description: 'Include file metadata headers in packed content (default: true)',
      },
      max_file_size: {
        type: 'number',
        description: 'Max file size in bytes to include (default: 1MB). Larger files are skipped.',
      },
      offset: {
        type: 'number',
        description: 'Byte offset for paginated read (default: 0)',
      },
      limit: {
        type: 'number',
        description: 'Max bytes to read (default: 65536 = 64KB)',
      },
    },
    required: ['operation'],
  },
};

// Handler function
export async function handler(
  params: Record<string, unknown>
): Promise<ToolResult<OperationResult>> {
  const parsed = ParamsSchema.safeParse(params);

  if (!parsed.success) {
    return { success: false, error: `Invalid params: ${parsed.error.message}` };
  }

  try {
    const result = await execute(parsed.data);
    return { success: true, result };
  } catch (error) {
    return { success: false, error: (error as Error).message };
  }
}
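And a sketch of driving the handler directly, as the new CLI code does (the pattern and page size are illustrative; run inside an async context):

  import { handler } from './context-cache.js';

  const pack = await handler({ operation: 'pack', patterns: ['@src/**/*.ts'] });
  if (pack.success && pack.result && pack.result.operation === 'pack') {
    // First 64KB page; has_more / next_offset drive further reads.
    const read = await handler({
      operation: 'read',
      session_id: pack.result.session_id,
      offset: 0,
      limit: 65536,
    });
  }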
@@ -22,6 +22,7 @@ import { executeInitWithProgress } from './smart-search.js';
 // codex_lens removed - functionality integrated into smart_search
 import * as readFileMod from './read-file.js';
 import * as coreMemoryMod from './core-memory.js';
+import * as contextCacheMod from './context-cache.js';
 import type { ProgressInfo } from './codex-lens.js';
 
 // Import legacy JS tools
@@ -357,6 +358,7 @@ registerTool(toLegacyTool(smartSearchMod));
 // codex_lens removed - functionality integrated into smart_search
 registerTool(toLegacyTool(readFileMod));
 registerTool(toLegacyTool(coreMemoryMod));
+registerTool(toLegacyTool(contextCacheMod));
 
 // Register legacy JS tools
 registerTool(uiGeneratePreviewTool);
ccw/src/tools/pattern-parser.ts (new file, 329 lines)
@@ -0,0 +1,329 @@
/**
 * Pattern Parser - Parse @expression patterns to file lists
 * Supports glob patterns like @src/**/*.ts, @CLAUDE.md, @../shared/**/*
 */

import { glob } from 'glob';
import { resolve, isAbsolute, normalize } from 'path';
import { existsSync, statSync, readFileSync } from 'fs';

/** Result of parsing @patterns */
export interface PatternParseResult {
  files: string[];    // Matched file paths (absolute)
  patterns: string[]; // Original patterns
  errors: string[];   // Parse errors
  stats: {
    total_files: number;
    total_patterns: number;
    matched_patterns: number;
  };
}

/** Options for pattern parsing */
export interface PatternParseOptions {
  cwd?: string;             // Working directory
  includeDirs?: string[];   // Additional directories to include
  ignore?: string[];        // Ignore patterns
  maxFiles?: number;        // Max files to return (default: 1000)
  followSymlinks?: boolean; // Follow symlinks (default: false)
}

/** Default ignore patterns */
const DEFAULT_IGNORE = [
  '**/node_modules/**',
  '**/.git/**',
  '**/dist/**',
  '**/build/**',
  '**/.next/**',
  '**/__pycache__/**',
  '**/*.pyc',
  '**/venv/**',
  '**/.venv/**',
];

/**
 * Extract pattern from @expression
 * Example: "@src/**/*.ts" -> "src/**/*.ts"
 */
function extractPattern(expression: string): string | null {
  const trimmed = expression.trim();
  if (!trimmed.startsWith('@')) {
    return null;
  }
  return trimmed.slice(1);
}

/**
 * Check if a pattern is a glob pattern or exact file
 */
function isGlobPattern(pattern: string): boolean {
  return pattern.includes('*') || pattern.includes('?') || pattern.includes('{') || pattern.includes('[');
}

/**
 * Validate that a path is within allowed directories
 */
function isPathAllowed(filePath: string, allowedDirs: string[]): boolean {
  const normalized = normalize(filePath);
  return allowedDirs.some(dir => normalized.startsWith(normalize(dir)));
}

/**
 * Build allowed directories list from options
 */
function buildAllowedDirs(cwd: string, includeDirs?: string[]): string[] {
  const allowed = [cwd];

  if (includeDirs) {
    for (const dir of includeDirs) {
      const absDir = isAbsolute(dir) ? dir : resolve(cwd, dir);
      if (existsSync(absDir) && statSync(absDir).isDirectory()) {
        allowed.push(absDir);
      }
    }
  }

  return allowed.map(d => normalize(d));
}

/**
 * Parse @expressions and return matched files
 */
export async function parsePatterns(
  patterns: string[],
  options: PatternParseOptions = {}
): Promise<PatternParseResult> {
  const {
    cwd = process.cwd(),
    includeDirs = [],
    ignore = [],
    maxFiles = 1000,
    followSymlinks = false,
  } = options;

  const result: PatternParseResult = {
    files: [],
    patterns: [],
    errors: [],
    stats: {
      total_files: 0,
      total_patterns: patterns.length,
      matched_patterns: 0,
    },
  };

  // Build allowed directories
  const allowedDirs = buildAllowedDirs(cwd, includeDirs);

  // Merge ignore patterns
  const allIgnore = [...DEFAULT_IGNORE, ...ignore];

  // Track unique files
  const fileSet = new Set<string>();

  for (const expr of patterns) {
    const pattern = extractPattern(expr);

    if (!pattern) {
      result.errors.push(`Invalid pattern: ${expr} (must start with @)`);
      continue;
    }

    result.patterns.push(pattern);

    try {
      if (isGlobPattern(pattern)) {
        // Glob pattern - use glob package
        // Determine base directory for pattern
        let baseDir = cwd;
        let globPattern = pattern;

        // Handle relative paths like ../shared/**
        if (pattern.startsWith('../') || pattern.startsWith('./')) {
          const parts = pattern.split('/');
          const pathParts: string[] = [];
          let i = 0;

          // Extract path prefix
          while (i < parts.length && (parts[i] === '..' || parts[i] === '.')) {
            pathParts.push(parts[i]);
            i++;
          }

          // Keep non-glob path parts
          while (i < parts.length && !isGlobPattern(parts[i])) {
            pathParts.push(parts[i]);
            i++;
          }

          // Resolve base directory
          if (pathParts.length > 0) {
            baseDir = resolve(cwd, pathParts.join('/'));
            globPattern = parts.slice(i).join('/') || '**/*';
          }
        }

        // Check if base directory is allowed
        if (!isPathAllowed(baseDir, allowedDirs)) {
          result.errors.push(`Pattern ${expr}: base directory not in allowed paths`);
          continue;
        }

        // Execute glob using the glob package
        const matches = await glob(globPattern, {
          cwd: baseDir,
          absolute: true,
          nodir: true,
          follow: followSymlinks,
          ignore: allIgnore,
          dot: false,
        });

        let matchCount = 0;
        for (const file of matches) {
          // Validate each file is in allowed directories
          if (isPathAllowed(file, allowedDirs)) {
            fileSet.add(file);
            matchCount++;
            if (fileSet.size >= maxFiles) break;
          }
        }

        if (matchCount > 0) {
          result.stats.matched_patterns++;
        }
      } else {
        // Exact file path
        const absPath = isAbsolute(pattern) ? pattern : resolve(cwd, pattern);

        // Validate path is allowed
        if (!isPathAllowed(absPath, allowedDirs)) {
          result.errors.push(`Pattern ${expr}: path not in allowed directories`);
          continue;
        }

        // Check file exists
        if (existsSync(absPath) && statSync(absPath).isFile()) {
          fileSet.add(absPath);
          result.stats.matched_patterns++;
        } else {
          result.errors.push(`Pattern ${expr}: file not found`);
        }
      }
    } catch (err) {
      result.errors.push(`Pattern ${expr}: ${(err as Error).message}`);
    }

    // Check max files limit
    if (fileSet.size >= maxFiles) {
      result.errors.push(`Max files limit (${maxFiles}) reached`);
      break;
    }
  }

  result.files = Array.from(fileSet);
  result.stats.total_files = result.files.length;

  return result;
}

/**
 * Pack files into a single content string with metadata headers
 */
export async function packFiles(
  files: string[],
  options: {
    includeMetadata?: boolean;
    separator?: string;
    maxFileSize?: number; // Max size per file in bytes (default: 1MB)
  } = {}
): Promise<{
  content: string;
  packedFiles: string[];
  skippedFiles: string[];
  totalBytes: number;
}> {
  const {
    includeMetadata = true,
    separator = '\n\n',
    maxFileSize = 1024 * 1024, // 1MB default
  } = options;

  const parts: string[] = [];
  const packedFiles: string[] = [];
  const skippedFiles: string[] = [];
  let totalBytes = 0;

  for (const file of files) {
    try {
      const stats = statSync(file);

      // Skip files that are too large
      if (stats.size > maxFileSize) {
        skippedFiles.push(file);
        continue;
      }

      const content = readFileSync(file, 'utf-8');

      if (includeMetadata) {
        // Add file header with metadata
        const header = [
          `=== FILE: ${file} ===`,
          `Size: ${stats.size} bytes`,
          `Modified: ${stats.mtime.toISOString()}`,
          '---',
        ].join('\n');
        parts.push(header + '\n' + content);
      } else {
        parts.push(content);
      }

      packedFiles.push(file);
      totalBytes += content.length;
    } catch {
      skippedFiles.push(file);
    }
  }

  return {
    content: parts.join(separator),
    packedFiles,
    skippedFiles,
    totalBytes,
  };
}

/**
 * Parse patterns and pack files in one call
 */
export async function parseAndPack(
  patterns: string[],
  options: PatternParseOptions & {
    includeMetadata?: boolean;
    separator?: string;
    maxFileSize?: number;
  } = {}
): Promise<{
  content: string;
  parseResult: PatternParseResult;
  packedFiles: string[];
  skippedFiles: string[];
  totalBytes: number;
}> {
  const parseResult = await parsePatterns(patterns, options);

  const packResult = await packFiles(parseResult.files, {
    includeMetadata: options.includeMetadata,
    separator: options.separator,
    maxFileSize: options.maxFileSize,
  });

  return {
    content: packResult.content,
    parseResult,
    packedFiles: packResult.packedFiles,
    skippedFiles: packResult.skippedFiles,
    totalBytes: packResult.totalBytes,
  };
}
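Usage sketch for the parser above (the patterns are illustrative; run inside an async context):

  import { parseAndPack } from './pattern-parser.js';

  const { content, packedFiles, skippedFiles, totalBytes } = await parseAndPack(
    ['@src/**/*.ts', '@CLAUDE.md'],
    { cwd: process.cwd(), includeMetadata: true, maxFileSize: 1024 * 1024 }
  );
  console.log(`packed ${packedFiles.length} files (${totalBytes} bytes), skipped ${skippedFiles.length}`);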