mirror of
https://github.com/catlog22/Claude-Code-Workflow.git
synced 2026-02-05 01:50:27 +08:00
feat: Add CLAUDE.md freshness tracking and update reminders
- Add SQLite table and CRUD methods for tracking update history
- Create freshness calculation service based on git file changes
- Add API endpoints for freshness data, marking updates, and history
- Display freshness badges in file tree (green/yellow/red indicators)
- Show freshness gauge and details in metadata panel
- Auto-mark files as updated after CLI sync
- Add English and Chinese i18n translations

Freshness algorithm: 100 - min((changedFilesCount / 20) * 100, 100)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
319
ccw/src/core/claude-freshness.ts
Normal file
319
ccw/src/core/claude-freshness.ts
Normal file
@@ -0,0 +1,319 @@
|
|||||||
|
/**
|
||||||
|
* CLAUDE.md Freshness Calculator
|
||||||
|
* Calculates freshness scores based on git changes since last update
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { execSync } from 'child_process';
|
||||||
|
import { existsSync, statSync, readdirSync } from 'fs';
|
||||||
|
import { dirname, extname, relative, join } from 'path';
|
||||||
|
import { getCoreMemoryStore, ClaudeUpdateRecord } from './core-memory-store.js';
|
||||||
|
|
||||||
|
// Source file extensions to track (from detect-changed-modules.ts).
// Change detection (both git-based and mtime-based) only counts files
// with one of these extensions.
const SOURCE_EXTENSIONS = [
  '.md', '.js', '.ts', '.jsx', '.tsx',
  '.py', '.go', '.rs', '.java', '.cpp', '.c', '.h',
  '.sh', '.ps1', '.json', '.yaml', '.yml'
];

// Directories to exclude from the mtime-based filesystem walk
// (VCS internals, dependency trees, build output, caches, temp dirs).
const EXCLUDE_DIRS = [
  '.git', '__pycache__', 'node_modules', '.venv', 'venv', 'env',
  'dist', 'build', '.cache', '.pytest_cache', '.mypy_cache',
  'coverage', '.nyc_output', 'logs', 'tmp', 'temp', '.ccw', '.workflow'
];
|
||||||
|
|
||||||
|
/** Freshness information for a single CLAUDE.md file. */
export interface FreshnessResult {
  // Absolute path of the CLAUDE.md file.
  path: string;
  // Scope of the file: user-global, project root, or module directory.
  level: 'user' | 'project' | 'module';
  // Path relative to the project root, normalized to forward slashes.
  relativePath: string;
  // Name of the directory containing the file (module-level files only).
  parentDirectory?: string;
  // ISO timestamp of the last tracked update, or null if never tracked.
  lastUpdated: string | null;
  // ISO timestamp of the file's last filesystem modification.
  lastModified: string;
  // Number of source files changed since the effective update time.
  changedFilesCount: number;
  // Freshness score 0-100 (100 = no relevant changes since last update).
  freshness: number;
  // Source of the most recent update record, when one exists.
  updateSource?: string;
  // True when the freshness score has dropped below 50.
  needsUpdate: boolean;
  // Up to the first 20 changed file paths, for the detail view.
  changedFiles?: string[];
}

/** Aggregate freshness statistics across all scanned files. */
export interface FreshnessSummary {
  // Total number of CLAUDE.md files scanned.
  totalFiles: number;
  // Count of files flagged with needsUpdate === true.
  staleCount: number;
  // Rounded mean of per-file freshness scores (100 when no files).
  averageFreshness: number;
  // ISO timestamp of when this summary was computed.
  lastScanAt: string;
}

/** Response payload for the freshness API. */
export interface FreshnessResponse {
  files: FreshnessResult[];
  summary: FreshnessSummary;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if git is available and we're in a repo
|
||||||
|
*/
|
||||||
|
function isGitRepo(basePath: string): boolean {
|
||||||
|
try {
|
||||||
|
execSync('git rev-parse --git-dir', { cwd: basePath, stdio: 'pipe' });
|
||||||
|
return true;
|
||||||
|
} catch (e) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get current git commit hash
|
||||||
|
*/
|
||||||
|
export function getCurrentGitCommit(basePath: string): string | null {
|
||||||
|
try {
|
||||||
|
const output = execSync('git rev-parse HEAD', {
|
||||||
|
cwd: basePath,
|
||||||
|
encoding: 'utf8',
|
||||||
|
stdio: ['pipe', 'pipe', 'pipe']
|
||||||
|
}).trim();
|
||||||
|
return output || null;
|
||||||
|
} catch (e) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get files changed since a specific date within a directory
|
||||||
|
*/
|
||||||
|
function getChangedFilesSince(basePath: string, modulePath: string, sinceDate: string): string[] {
|
||||||
|
try {
|
||||||
|
// Format date for git
|
||||||
|
const date = new Date(sinceDate);
|
||||||
|
const formattedDate = date.toISOString().split('T')[0];
|
||||||
|
|
||||||
|
// Get files changed since the date
|
||||||
|
const output = execSync(
|
||||||
|
`git log --name-only --since="${formattedDate}" --pretty=format: -- "${modulePath}"`,
|
||||||
|
{
|
||||||
|
cwd: basePath,
|
||||||
|
encoding: 'utf8',
|
||||||
|
stdio: ['pipe', 'pipe', 'pipe']
|
||||||
|
}
|
||||||
|
).trim();
|
||||||
|
|
||||||
|
if (!output) return [];
|
||||||
|
|
||||||
|
// Get unique files and filter by source extensions
|
||||||
|
const files = [...new Set(output.split('\n').filter(f => f.trim()))];
|
||||||
|
return files.filter(f => {
|
||||||
|
const ext = extname(f).toLowerCase();
|
||||||
|
return SOURCE_EXTENSIONS.includes(ext);
|
||||||
|
});
|
||||||
|
} catch (e) {
|
||||||
|
// Fallback to mtime-based detection
|
||||||
|
return findFilesModifiedSince(modulePath, sinceDate);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Fallback: Find files modified since a date using mtime
|
||||||
|
*/
|
||||||
|
function findFilesModifiedSince(dirPath: string, sinceDate: string): string[] {
|
||||||
|
const results: string[] = [];
|
||||||
|
const cutoffTime = new Date(sinceDate).getTime();
|
||||||
|
|
||||||
|
function scan(currentPath: string): void {
|
||||||
|
try {
|
||||||
|
const entries = readdirSync(currentPath, { withFileTypes: true });
|
||||||
|
|
||||||
|
for (const entry of entries) {
|
||||||
|
if (entry.isDirectory()) {
|
||||||
|
if (EXCLUDE_DIRS.includes(entry.name)) continue;
|
||||||
|
scan(join(currentPath, entry.name));
|
||||||
|
} else if (entry.isFile()) {
|
||||||
|
const ext = extname(entry.name).toLowerCase();
|
||||||
|
if (!SOURCE_EXTENSIONS.includes(ext)) continue;
|
||||||
|
|
||||||
|
const fullPath = join(currentPath, entry.name);
|
||||||
|
try {
|
||||||
|
const stat = statSync(fullPath);
|
||||||
|
if (stat.mtimeMs > cutoffTime) {
|
||||||
|
results.push(relative(dirPath, fullPath));
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
// Skip files we can't stat
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
// Ignore permission errors
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (existsSync(dirPath)) {
|
||||||
|
scan(dirPath);
|
||||||
|
}
|
||||||
|
return results;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate freshness for a single CLAUDE.md file
|
||||||
|
*/
|
||||||
|
export function calculateFreshness(
|
||||||
|
filePath: string,
|
||||||
|
fileLevel: 'user' | 'project' | 'module',
|
||||||
|
lastUpdateTime: string | null,
|
||||||
|
lastModified: string,
|
||||||
|
projectPath: string,
|
||||||
|
threshold: number = 20
|
||||||
|
): FreshnessResult {
|
||||||
|
// Use lastUpdateTime from history, or fall back to file mtime
|
||||||
|
const effectiveUpdateTime = lastUpdateTime || lastModified;
|
||||||
|
|
||||||
|
// Calculate module path for change detection
|
||||||
|
let modulePath: string | null = null;
|
||||||
|
let changedFiles: string[] = [];
|
||||||
|
|
||||||
|
if (fileLevel === 'module') {
|
||||||
|
// For module-level files, scan the parent directory
|
||||||
|
modulePath = dirname(filePath);
|
||||||
|
} else if (fileLevel === 'project') {
|
||||||
|
// For project-level files, scan the project root
|
||||||
|
modulePath = projectPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Only calculate changes for module/project level in git repos
|
||||||
|
if (modulePath && isGitRepo(projectPath)) {
|
||||||
|
changedFiles = getChangedFilesSince(projectPath, modulePath, effectiveUpdateTime);
|
||||||
|
// Exclude the CLAUDE.md file itself
|
||||||
|
changedFiles = changedFiles.filter(f => !f.endsWith('CLAUDE.md'));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate freshness percentage
|
||||||
|
const changedCount = changedFiles.length;
|
||||||
|
const freshness = Math.max(0, 100 - Math.floor((changedCount / threshold) * 100));
|
||||||
|
|
||||||
|
// Determine parent directory for display
|
||||||
|
const parentDirectory = fileLevel === 'module'
|
||||||
|
? filePath.split(/[\\/]/).slice(-2, -1)[0]
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
return {
|
||||||
|
path: filePath,
|
||||||
|
level: fileLevel,
|
||||||
|
relativePath: relative(projectPath, filePath).replace(/\\/g, '/'),
|
||||||
|
parentDirectory,
|
||||||
|
lastUpdated: lastUpdateTime,
|
||||||
|
lastModified,
|
||||||
|
changedFilesCount: changedCount,
|
||||||
|
freshness,
|
||||||
|
needsUpdate: freshness < 50,
|
||||||
|
changedFiles: changedFiles.slice(0, 20) // Limit to first 20 for detail view
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate freshness for all CLAUDE.md files in a project
|
||||||
|
*/
|
||||||
|
export function calculateAllFreshness(
|
||||||
|
claudeFiles: Array<{
|
||||||
|
path: string;
|
||||||
|
level: 'user' | 'project' | 'module';
|
||||||
|
lastModified: string;
|
||||||
|
}>,
|
||||||
|
projectPath: string,
|
||||||
|
threshold: number = 20
|
||||||
|
): FreshnessResponse {
|
||||||
|
// Get update records from store
|
||||||
|
const store = getCoreMemoryStore(projectPath);
|
||||||
|
const updateRecords = store.getAllClaudeUpdateRecords();
|
||||||
|
|
||||||
|
// Create a map for quick lookup
|
||||||
|
const updateMap = new Map<string, ClaudeUpdateRecord>();
|
||||||
|
for (const record of updateRecords) {
|
||||||
|
updateMap.set(record.file_path, record);
|
||||||
|
}
|
||||||
|
|
||||||
|
const results: FreshnessResult[] = [];
|
||||||
|
|
||||||
|
for (const file of claudeFiles) {
|
||||||
|
const updateRecord = updateMap.get(file.path);
|
||||||
|
|
||||||
|
const result = calculateFreshness(
|
||||||
|
file.path,
|
||||||
|
file.level,
|
||||||
|
updateRecord?.updated_at || null,
|
||||||
|
file.lastModified,
|
||||||
|
projectPath,
|
||||||
|
threshold
|
||||||
|
);
|
||||||
|
|
||||||
|
if (updateRecord) {
|
||||||
|
result.updateSource = updateRecord.update_source;
|
||||||
|
}
|
||||||
|
|
||||||
|
results.push(result);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate summary
|
||||||
|
const staleCount = results.filter(r => r.needsUpdate).length;
|
||||||
|
const totalFreshness = results.reduce((sum, r) => sum + r.freshness, 0);
|
||||||
|
const averageFreshness = results.length > 0 ? Math.round(totalFreshness / results.length) : 100;
|
||||||
|
|
||||||
|
return {
|
||||||
|
files: results,
|
||||||
|
summary: {
|
||||||
|
totalFiles: results.length,
|
||||||
|
staleCount,
|
||||||
|
averageFreshness,
|
||||||
|
lastScanAt: new Date().toISOString()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mark a CLAUDE.md file as updated
|
||||||
|
*/
|
||||||
|
export function markFileAsUpdated(
|
||||||
|
filePath: string,
|
||||||
|
fileLevel: 'user' | 'project' | 'module',
|
||||||
|
updateSource: 'manual' | 'cli_sync' | 'dashboard' | 'api',
|
||||||
|
projectPath: string,
|
||||||
|
metadata?: object
|
||||||
|
): ClaudeUpdateRecord {
|
||||||
|
const store = getCoreMemoryStore(projectPath);
|
||||||
|
const now = new Date().toISOString();
|
||||||
|
|
||||||
|
// Get current git commit
|
||||||
|
const gitCommit = getCurrentGitCommit(projectPath);
|
||||||
|
|
||||||
|
// Calculate changed files count before this update
|
||||||
|
const lastUpdate = store.getLastClaudeUpdate(filePath);
|
||||||
|
let filesChangedCount = 0;
|
||||||
|
|
||||||
|
if (lastUpdate && isGitRepo(projectPath)) {
|
||||||
|
const modulePath = fileLevel === 'module' ? dirname(filePath) : projectPath;
|
||||||
|
const changedFiles = getChangedFilesSince(projectPath, modulePath, lastUpdate.updated_at);
|
||||||
|
filesChangedCount = changedFiles.filter(f => !f.endsWith('CLAUDE.md')).length;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Insert update record
|
||||||
|
const record = store.insertClaudeUpdateRecord({
|
||||||
|
file_path: filePath,
|
||||||
|
file_level: fileLevel,
|
||||||
|
module_path: fileLevel === 'module' ? dirname(filePath) : undefined,
|
||||||
|
updated_at: now,
|
||||||
|
update_source: updateSource,
|
||||||
|
git_commit_hash: gitCommit || undefined,
|
||||||
|
files_changed_before_update: filesChangedCount,
|
||||||
|
metadata: metadata ? JSON.stringify(metadata) : undefined
|
||||||
|
});
|
||||||
|
|
||||||
|
return record;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get update history for a file
|
||||||
|
*/
|
||||||
|
export function getUpdateHistory(
|
||||||
|
filePath: string,
|
||||||
|
projectPath: string,
|
||||||
|
limit: number = 50
|
||||||
|
): ClaudeUpdateRecord[] {
|
||||||
|
const store = getCoreMemoryStore(projectPath);
|
||||||
|
return store.getClaudeUpdateHistory(filePath, limit);
|
||||||
|
}
|
||||||
@@ -71,6 +71,18 @@ export interface MemoryChunk {
|
|||||||
created_at: string;
|
created_at: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/** One row of the claude_update_history table. */
export interface ClaudeUpdateRecord {
  // Row id assigned by SQLite; absent before insertion.
  id?: number;
  // Absolute path of the CLAUDE.md file this record tracks.
  file_path: string;
  // Scope of the file.
  file_level: 'user' | 'project' | 'module';
  // Containing directory, set for module-level files.
  module_path?: string;
  // ISO timestamp of the update.
  updated_at: string;
  // What triggered the update.
  update_source: 'manual' | 'cli_sync' | 'dashboard' | 'api';
  // Git HEAD commit hash at update time, when available.
  git_commit_hash?: string;
  // Source files changed between the previous update and this one.
  files_changed_before_update: number;
  // Optional JSON-encoded extra context.
  metadata?: string;
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Core Memory Store using SQLite
|
* Core Memory Store using SQLite
|
||||||
*/
|
*/
|
||||||
@@ -176,6 +188,20 @@ export class CoreMemoryStore {
|
|||||||
UNIQUE(source_id, chunk_index)
|
UNIQUE(source_id, chunk_index)
|
||||||
);
|
);
|
||||||
|
|
||||||
|
-- CLAUDE.md update history table
|
||||||
|
CREATE TABLE IF NOT EXISTS claude_update_history (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
file_path TEXT NOT NULL,
|
||||||
|
file_level TEXT NOT NULL CHECK(file_level IN ('user', 'project', 'module')),
|
||||||
|
module_path TEXT,
|
||||||
|
updated_at TEXT NOT NULL,
|
||||||
|
update_source TEXT NOT NULL CHECK(update_source IN ('manual', 'cli_sync', 'dashboard', 'api')),
|
||||||
|
git_commit_hash TEXT,
|
||||||
|
files_changed_before_update INTEGER DEFAULT 0,
|
||||||
|
metadata TEXT,
|
||||||
|
UNIQUE(file_path, updated_at)
|
||||||
|
);
|
||||||
|
|
||||||
-- Indexes for efficient queries
|
-- Indexes for efficient queries
|
||||||
CREATE INDEX IF NOT EXISTS idx_memories_created ON memories(created_at DESC);
|
CREATE INDEX IF NOT EXISTS idx_memories_created ON memories(created_at DESC);
|
||||||
CREATE INDEX IF NOT EXISTS idx_memories_updated ON memories(updated_at DESC);
|
CREATE INDEX IF NOT EXISTS idx_memories_updated ON memories(updated_at DESC);
|
||||||
@@ -186,6 +212,9 @@ export class CoreMemoryStore {
|
|||||||
CREATE INDEX IF NOT EXISTS idx_session_metadata_type ON session_metadata_cache(session_type);
|
CREATE INDEX IF NOT EXISTS idx_session_metadata_type ON session_metadata_cache(session_type);
|
||||||
CREATE INDEX IF NOT EXISTS idx_memory_chunks_source ON memory_chunks(source_id, source_type);
|
CREATE INDEX IF NOT EXISTS idx_memory_chunks_source ON memory_chunks(source_id, source_type);
|
||||||
CREATE INDEX IF NOT EXISTS idx_memory_chunks_embedded ON memory_chunks(embedding IS NOT NULL);
|
CREATE INDEX IF NOT EXISTS idx_memory_chunks_embedded ON memory_chunks(embedding IS NOT NULL);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_claude_history_path ON claude_update_history(file_path);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_claude_history_updated ON claude_update_history(updated_at DESC);
|
||||||
|
CREATE INDEX IF NOT EXISTS idx_claude_history_module ON claude_update_history(module_path);
|
||||||
`);
|
`);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1078,6 +1107,128 @@ ${memory.content}
|
|||||||
stmt.run(sourceId);
|
stmt.run(sourceId);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ============================================================================
|
||||||
|
// CLAUDE.md Update History CRUD Operations
|
||||||
|
// ============================================================================
|
||||||
|
|
||||||
|
/**
 * Insert a CLAUDE.md update record and return it with its new row id.
 * Optional fields are stored as NULL.
 */
insertClaudeUpdateRecord(record: Omit<ClaudeUpdateRecord, 'id'>): ClaudeUpdateRecord {
  const insert = this.db.prepare(`
    INSERT INTO claude_update_history
    (file_path, file_level, module_path, updated_at, update_source, git_commit_hash, files_changed_before_update, metadata)
    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
  `);

  const info = insert.run(
    record.file_path,
    record.file_level,
    record.module_path || null,
    record.updated_at,
    record.update_source,
    record.git_commit_hash || null,
    record.files_changed_before_update,
    record.metadata || null
  );

  return { ...record, id: info.lastInsertRowid as number };
}
|
||||||
|
|
||||||
|
/**
 * Return the most recent update record for a file,
 * or null when the file has never been tracked.
 */
getLastClaudeUpdate(filePath: string): ClaudeUpdateRecord | null {
  const row = this.db.prepare(`
    SELECT * FROM claude_update_history
    WHERE file_path = ?
    ORDER BY updated_at DESC
    LIMIT 1
  `).get(filePath) as any;

  if (!row) return null;

  // Map the raw SQLite row onto the typed record shape.
  const record: ClaudeUpdateRecord = {
    id: row.id,
    file_path: row.file_path,
    file_level: row.file_level,
    module_path: row.module_path,
    updated_at: row.updated_at,
    update_source: row.update_source,
    git_commit_hash: row.git_commit_hash,
    files_changed_before_update: row.files_changed_before_update,
    metadata: row.metadata
  };
  return record;
}
|
||||||
|
|
||||||
|
/**
 * Return up to `limit` update records for a file, newest first.
 */
getClaudeUpdateHistory(filePath: string, limit: number = 50): ClaudeUpdateRecord[] {
  const query = this.db.prepare(`
    SELECT * FROM claude_update_history
    WHERE file_path = ?
    ORDER BY updated_at DESC
    LIMIT ?
  `);

  // Map each raw SQLite row onto the typed record shape.
  return (query.all(filePath, limit) as any[]).map(row => ({
    id: row.id,
    file_path: row.file_path,
    file_level: row.file_level,
    module_path: row.module_path,
    updated_at: row.updated_at,
    update_source: row.update_source,
    git_commit_hash: row.git_commit_hash,
    files_changed_before_update: row.files_changed_before_update,
    metadata: row.metadata
  }));
}
|
||||||
|
|
||||||
|
/**
 * Return the latest update record for every tracked CLAUDE.md file,
 * newest first (one record per file_path), for freshness calculation.
 */
getAllClaudeUpdateRecords(): ClaudeUpdateRecord[] {
  // The inner query picks the highest row id per file_path, i.e. the
  // most recently inserted record for each file.
  const query = this.db.prepare(`
    SELECT * FROM claude_update_history
    WHERE id IN (
      SELECT MAX(id) FROM claude_update_history
      GROUP BY file_path
    )
    ORDER BY updated_at DESC
  `);

  // Map each raw SQLite row onto the typed record shape.
  return (query.all() as any[]).map(row => ({
    id: row.id,
    file_path: row.file_path,
    file_level: row.file_level,
    module_path: row.module_path,
    updated_at: row.updated_at,
    update_source: row.update_source,
    git_commit_hash: row.git_commit_hash,
    files_changed_before_update: row.files_changed_before_update,
    metadata: row.metadata
  }));
}
|
||||||
|
|
||||||
|
/**
 * Remove every update record for a file.
 * @returns The number of rows deleted.
 */
deleteClaudeUpdateRecords(filePath: string): number {
  const remove = this.db.prepare(`
    DELETE FROM claude_update_history
    WHERE file_path = ?
  `);
  return remove.run(filePath).changes;
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Close database connection
|
* Close database connection
|
||||||
*/
|
*/
|
||||||
|
|||||||
@@ -651,6 +651,14 @@ export async function handleClaudeRoutes(ctx: RouteContext): Promise<boolean> {
|
|||||||
// Write updated content
|
// Write updated content
|
||||||
writeFileSync(filePath, finalContent, 'utf8');
|
writeFileSync(filePath, finalContent, 'utf8');
|
||||||
|
|
||||||
|
// Mark file as updated for freshness tracking
|
||||||
|
try {
|
||||||
|
const { markFileAsUpdated } = await import('../claude-freshness.js');
|
||||||
|
markFileAsUpdated(filePath, level, 'cli_sync', initialPath, { tool, mode });
|
||||||
|
} catch (e) {
|
||||||
|
console.error('Failed to mark file as updated:', e);
|
||||||
|
}
|
||||||
|
|
||||||
// Broadcast WebSocket event
|
// Broadcast WebSocket event
|
||||||
broadcastToClients({
|
broadcastToClients({
|
||||||
type: 'CLAUDE_FILE_SYNCED',
|
type: 'CLAUDE_FILE_SYNCED',
|
||||||
@@ -1026,5 +1034,150 @@ export async function handleClaudeRoutes(ctx: RouteContext): Promise<boolean> {
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// API: Get freshness scores for all CLAUDE.md files
|
||||||
|
if (pathname === '/api/memory/claude/freshness' && req.method === 'GET') {
|
||||||
|
try {
|
||||||
|
const { calculateAllFreshness } = await import('../claude-freshness.js');
|
||||||
|
|
||||||
|
const projectPathParam = url.searchParams.get('path') || initialPath;
|
||||||
|
const threshold = parseInt(url.searchParams.get('threshold') || '20', 10);
|
||||||
|
|
||||||
|
// Get all CLAUDE.md files
|
||||||
|
const filesData = scanAllClaudeFiles(projectPathParam);
|
||||||
|
|
||||||
|
// Prepare file list for freshness calculation
|
||||||
|
const claudeFiles: Array<{
|
||||||
|
path: string;
|
||||||
|
level: 'user' | 'project' | 'module';
|
||||||
|
lastModified: string;
|
||||||
|
}> = [];
|
||||||
|
|
||||||
|
if (filesData.user.main) {
|
||||||
|
claudeFiles.push({
|
||||||
|
path: filesData.user.main.path,
|
||||||
|
level: 'user',
|
||||||
|
lastModified: filesData.user.main.lastModified
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (filesData.project.main) {
|
||||||
|
claudeFiles.push({
|
||||||
|
path: filesData.project.main.path,
|
||||||
|
level: 'project',
|
||||||
|
lastModified: filesData.project.main.lastModified
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const module of filesData.modules) {
|
||||||
|
claudeFiles.push({
|
||||||
|
path: module.path,
|
||||||
|
level: 'module',
|
||||||
|
lastModified: module.lastModified
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate freshness
|
||||||
|
const freshnessData = calculateAllFreshness(claudeFiles, projectPathParam, threshold);
|
||||||
|
|
||||||
|
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||||
|
res.end(JSON.stringify(freshnessData));
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error calculating freshness:', error);
|
||||||
|
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||||
|
res.end(JSON.stringify({ error: (error as Error).message }));
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// API: Mark a CLAUDE.md file as updated
|
||||||
|
if (pathname === '/api/memory/claude/mark-updated' && req.method === 'POST') {
|
||||||
|
handlePostRequest(req, res, async (body: any) => {
|
||||||
|
const { path: filePath, source, metadata } = body;
|
||||||
|
|
||||||
|
if (!filePath) {
|
||||||
|
return { error: 'Missing path parameter', status: 400 };
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!source || !['manual', 'cli_sync', 'dashboard', 'api'].includes(source)) {
|
||||||
|
return { error: 'Invalid or missing source parameter', status: 400 };
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { markFileAsUpdated } = await import('../claude-freshness.js');
|
||||||
|
|
||||||
|
// Determine file level
|
||||||
|
let level: 'user' | 'project' | 'module' = 'module';
|
||||||
|
if (filePath.includes(join(homedir(), '.claude'))) {
|
||||||
|
level = 'user';
|
||||||
|
} else if (filePath.includes('.claude')) {
|
||||||
|
level = 'project';
|
||||||
|
}
|
||||||
|
|
||||||
|
const record = markFileAsUpdated(filePath, level, source, initialPath, metadata);
|
||||||
|
|
||||||
|
// Broadcast update
|
||||||
|
broadcastToClients({
|
||||||
|
type: 'CLAUDE_FRESHNESS_UPDATED',
|
||||||
|
data: {
|
||||||
|
path: filePath,
|
||||||
|
level,
|
||||||
|
updatedAt: record.updated_at,
|
||||||
|
source
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
record: {
|
||||||
|
id: record.id,
|
||||||
|
updated_at: record.updated_at,
|
||||||
|
filesChangedBeforeUpdate: record.files_changed_before_update
|
||||||
|
}
|
||||||
|
};
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error marking file as updated:', error);
|
||||||
|
return { error: (error as Error).message, status: 500 };
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// API: Get update history for a CLAUDE.md file
|
||||||
|
if (pathname === '/api/memory/claude/history' && req.method === 'GET') {
|
||||||
|
const filePath = url.searchParams.get('path');
|
||||||
|
const limit = parseInt(url.searchParams.get('limit') || '50', 10);
|
||||||
|
|
||||||
|
if (!filePath) {
|
||||||
|
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||||
|
res.end(JSON.stringify({ error: 'Missing path parameter' }));
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const { getUpdateHistory } = await import('../claude-freshness.js');
|
||||||
|
|
||||||
|
const records = getUpdateHistory(filePath, initialPath, limit);
|
||||||
|
|
||||||
|
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||||
|
res.end(JSON.stringify({
|
||||||
|
records: records.map(r => ({
|
||||||
|
id: r.id,
|
||||||
|
updated_at: r.updated_at,
|
||||||
|
update_source: r.update_source,
|
||||||
|
git_commit_hash: r.git_commit_hash,
|
||||||
|
files_changed_before_update: r.files_changed_before_update,
|
||||||
|
metadata: r.metadata ? JSON.parse(r.metadata) : undefined
|
||||||
|
}))
|
||||||
|
}));
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error getting update history:', error);
|
||||||
|
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||||
|
res.end(JSON.stringify({ error: (error as Error).message }));
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ import { getAllToolSchemas, executeTool, executeToolWithProgress } from '../tool
|
|||||||
import type { ToolSchema, ToolResult } from '../types/tool.js';
|
import type { ToolSchema, ToolResult } from '../types/tool.js';
|
||||||
|
|
||||||
const SERVER_NAME = 'ccw-tools';
|
const SERVER_NAME = 'ccw-tools';
|
||||||
const SERVER_VERSION = '6.1.4';
|
const SERVER_VERSION = '6.2.0';
|
||||||
|
|
||||||
// Default enabled tools (core set)
|
// Default enabled tools (core set)
|
||||||
const DEFAULT_TOOLS: string[] = ['write_file', 'edit_file', 'read_file', 'smart_search', 'core_memory'];
|
const DEFAULT_TOOLS: string[] = ['write_file', 'edit_file', 'read_file', 'smart_search', 'core_memory'];
|
||||||
|
|||||||
@@ -734,6 +734,118 @@
|
|||||||
border-color: hsl(0, 72%, 45%);
|
border-color: hsl(0, 72%, 45%);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* ========================================
|
||||||
|
* Freshness Tracking Styles
|
||||||
|
* ======================================== */
|
||||||
|
|
||||||
|
/* Freshness badges in file tree */
|
||||||
|
.freshness-badge {
|
||||||
|
font-size: 0.65rem;
|
||||||
|
padding: 1px 4px;
|
||||||
|
border-radius: 3px;
|
||||||
|
margin-left: auto;
|
||||||
|
font-weight: 500;
|
||||||
|
flex-shrink: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.freshness-badge.good {
|
||||||
|
background: hsl(142, 71%, 45%, 0.15);
|
||||||
|
color: hsl(142, 71%, 45%);
|
||||||
|
}
|
||||||
|
|
||||||
|
.freshness-badge.warn {
|
||||||
|
background: hsl(38, 92%, 50%, 0.15);
|
||||||
|
color: hsl(38, 92%, 50%);
|
||||||
|
}
|
||||||
|
|
||||||
|
.freshness-badge.stale {
|
||||||
|
background: hsl(0, 72%, 51%, 0.15);
|
||||||
|
color: hsl(0, 72%, 51%);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* File tree item freshness states */
|
||||||
|
.file-tree-item.freshness-stale {
|
||||||
|
border-left: 2px solid hsl(0, 72%, 51%);
|
||||||
|
}
|
||||||
|
|
||||||
|
.file-tree-item.freshness-warn {
|
||||||
|
border-left: 2px solid hsl(38, 92%, 50%);
|
||||||
|
}
|
||||||
|
|
||||||
|
.file-tree-item.freshness-good {
|
||||||
|
border-left: 2px solid hsl(142, 71%, 45%);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Freshness section in metadata panel */
|
||||||
|
.freshness-section {
|
||||||
|
padding: 1rem;
|
||||||
|
border: 1px solid hsl(var(--border));
|
||||||
|
border-radius: 0.5rem;
|
||||||
|
background: hsl(var(--card));
|
||||||
|
}
|
||||||
|
|
||||||
|
.freshness-section h4 {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 0.5rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Freshness gauge/progress bar */
|
||||||
|
.freshness-gauge {
|
||||||
|
position: relative;
|
||||||
|
height: 8px;
|
||||||
|
background: hsl(var(--muted));
|
||||||
|
border-radius: 4px;
|
||||||
|
overflow: hidden;
|
||||||
|
margin: 0.75rem 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.freshness-bar {
|
||||||
|
height: 100%;
|
||||||
|
border-radius: 4px;
|
||||||
|
transition: width 0.3s ease;
|
||||||
|
}
|
||||||
|
|
||||||
|
.freshness-bar.good {
|
||||||
|
background: linear-gradient(90deg, hsl(142, 71%, 45%), hsl(142, 71%, 55%));
|
||||||
|
}
|
||||||
|
|
||||||
|
.freshness-bar.warn {
|
||||||
|
background: linear-gradient(90deg, hsl(38, 92%, 50%), hsl(45, 92%, 50%));
|
||||||
|
}
|
||||||
|
|
||||||
|
.freshness-bar.stale {
|
||||||
|
background: linear-gradient(90deg, hsl(0, 72%, 51%), hsl(15, 72%, 51%));
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Freshness value display */
|
||||||
|
.freshness-value-display {
|
||||||
|
text-align: center;
|
||||||
|
font-size: 1.5rem;
|
||||||
|
font-weight: 700;
|
||||||
|
color: hsl(var(--foreground));
|
||||||
|
margin-bottom: 0.75rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Update reminder warning */
|
||||||
|
.update-reminder {
|
||||||
|
display: flex;
|
||||||
|
align-items: center;
|
||||||
|
gap: 0.5rem;
|
||||||
|
padding: 0.75rem;
|
||||||
|
background: hsl(38, 92%, 50%, 0.1);
|
||||||
|
border: 1px solid hsl(38, 92%, 50%, 0.3);
|
||||||
|
border-radius: 0.375rem;
|
||||||
|
color: hsl(38, 92%, 40%);
|
||||||
|
font-size: 0.8rem;
|
||||||
|
margin: 0.75rem 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
.update-reminder i {
|
||||||
|
flex-shrink: 0;
|
||||||
|
color: hsl(38, 92%, 50%);
|
||||||
|
}
|
||||||
|
|
||||||
/* ========================================
|
/* ========================================
|
||||||
* Responsive Design
|
* Responsive Design
|
||||||
* ======================================== */
|
* ======================================== */
|
||||||
|
|||||||
@@ -1087,6 +1087,15 @@ const i18n = {
|
|||||||
'claudeManager.unsavedChanges': 'You have unsaved changes. Discard them?',
|
'claudeManager.unsavedChanges': 'You have unsaved changes. Discard them?',
|
||||||
'claudeManager.saved': 'File saved successfully',
|
'claudeManager.saved': 'File saved successfully',
|
||||||
'claudeManager.saveError': 'Failed to save file',
|
'claudeManager.saveError': 'Failed to save file',
|
||||||
|
'claudeManager.freshness': 'Freshness',
|
||||||
|
'claudeManager.lastContentUpdate': 'Last Content Update',
|
||||||
|
'claudeManager.changedFiles': 'Changed Files',
|
||||||
|
'claudeManager.filesSinceUpdate': 'files since update',
|
||||||
|
'claudeManager.updateReminder': 'This file may need updating',
|
||||||
|
'claudeManager.markAsUpdated': 'Mark as Updated',
|
||||||
|
'claudeManager.markedAsUpdated': 'Marked as updated successfully',
|
||||||
|
'claudeManager.markUpdateError': 'Failed to mark as updated',
|
||||||
|
'claudeManager.never': 'Never tracked',
|
||||||
|
|
||||||
// Graph Explorer
|
// Graph Explorer
|
||||||
'nav.graphExplorer': 'Graph',
|
'nav.graphExplorer': 'Graph',
|
||||||
@@ -2377,6 +2386,15 @@ const i18n = {
|
|||||||
'claudeManager.unsavedChanges': '您有未保存的更改。是否放弃?',
|
'claudeManager.unsavedChanges': '您有未保存的更改。是否放弃?',
|
||||||
'claudeManager.saved': '文件保存成功',
|
'claudeManager.saved': '文件保存成功',
|
||||||
'claudeManager.saveError': '文件保存失败',
|
'claudeManager.saveError': '文件保存失败',
|
||||||
|
'claudeManager.freshness': '新鲜度',
|
||||||
|
'claudeManager.lastContentUpdate': '上次内容更新',
|
||||||
|
'claudeManager.changedFiles': '变动文件',
|
||||||
|
'claudeManager.filesSinceUpdate': '个文件自上次更新后变动',
|
||||||
|
'claudeManager.updateReminder': '此文件可能需要更新',
|
||||||
|
'claudeManager.markAsUpdated': '标记为已更新',
|
||||||
|
'claudeManager.markedAsUpdated': '已成功标记为已更新',
|
||||||
|
'claudeManager.markUpdateError': '标记更新失败',
|
||||||
|
'claudeManager.never': '从未追踪',
|
||||||
|
|
||||||
// Graph Explorer
|
// Graph Explorer
|
||||||
'nav.graphExplorer': '图谱',
|
'nav.graphExplorer': '图谱',
|
||||||
|
|||||||
@@ -17,6 +17,8 @@ var fileTreeExpanded = {
|
|||||||
modules: {}
|
modules: {}
|
||||||
};
|
};
|
||||||
var searchQuery = '';
|
var searchQuery = '';
|
||||||
|
var freshnessData = {}; // { [filePath]: FreshnessResult }
|
||||||
|
var freshnessSummary = null;
|
||||||
|
|
||||||
// ========== Main Render Function ==========
|
// ========== Main Render Function ==========
|
||||||
async function renderClaudeManager() {
|
async function renderClaudeManager() {
|
||||||
@@ -37,6 +39,7 @@ async function renderClaudeManager() {
|
|||||||
|
|
||||||
// Load data
|
// Load data
|
||||||
await loadClaudeFiles();
|
await loadClaudeFiles();
|
||||||
|
await loadFreshnessData();
|
||||||
|
|
||||||
// Render layout
|
// Render layout
|
||||||
container.innerHTML = '<div class="claude-manager-view">' +
|
container.innerHTML = '<div class="claude-manager-view">' +
|
||||||
@@ -85,10 +88,60 @@ async function loadClaudeFiles() {
|
|||||||
|
|
||||||
async function refreshClaudeFiles() {
|
async function refreshClaudeFiles() {
|
||||||
await loadClaudeFiles();
|
await loadClaudeFiles();
|
||||||
|
await loadFreshnessData();
|
||||||
await renderClaudeManager();
|
await renderClaudeManager();
|
||||||
addGlobalNotification('success', t('claudeManager.refreshed'), null, 'CLAUDE.md');
|
addGlobalNotification('success', t('claudeManager.refreshed'), null, 'CLAUDE.md');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ========== Freshness Data Loading ==========
|
||||||
|
async function loadFreshnessData() {
|
||||||
|
try {
|
||||||
|
var res = await fetch('/api/memory/claude/freshness?path=' + encodeURIComponent(projectPath || ''));
|
||||||
|
if (!res.ok) throw new Error('Failed to load freshness data');
|
||||||
|
var data = await res.json();
|
||||||
|
|
||||||
|
// Build lookup map
|
||||||
|
freshnessData = {};
|
||||||
|
if (data.files) {
|
||||||
|
data.files.forEach(function(f) {
|
||||||
|
freshnessData[f.path] = f;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
freshnessSummary = data.summary || null;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error loading freshness data:', error);
|
||||||
|
freshnessData = {};
|
||||||
|
freshnessSummary = null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function markFileAsUpdated() {
|
||||||
|
if (!selectedFile) return;
|
||||||
|
|
||||||
|
try {
|
||||||
|
var res = await fetch('/api/memory/claude/mark-updated', {
|
||||||
|
method: 'POST',
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: JSON.stringify({
|
||||||
|
path: selectedFile.path,
|
||||||
|
source: 'dashboard'
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!res.ok) throw new Error('Failed to mark file as updated');
|
||||||
|
|
||||||
|
addGlobalNotification('success', t('claudeManager.markedAsUpdated') || 'Marked as updated', null, 'CLAUDE.md');
|
||||||
|
|
||||||
|
// Reload freshness data
|
||||||
|
await loadFreshnessData();
|
||||||
|
renderFileTree();
|
||||||
|
renderFileMetadata();
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error marking file as updated:', error);
|
||||||
|
addGlobalNotification('error', t('claudeManager.markUpdateError') || 'Failed to mark as updated', null, 'CLAUDE.md');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// ========== File Tree Rendering ==========
|
// ========== File Tree Rendering ==========
|
||||||
function renderFileTree() {
|
function renderFileTree() {
|
||||||
var container = document.getElementById('claude-file-tree');
|
var container = document.getElementById('claude-file-tree');
|
||||||
@@ -183,11 +236,30 @@ function renderFileTreeItem(file, indentLevel) {
|
|||||||
var indentPx = indentLevel * 1.5;
|
var indentPx = indentLevel * 1.5;
|
||||||
var safeId = file.id.replace(/'/g, "'");
|
var safeId = file.id.replace(/'/g, "'");
|
||||||
|
|
||||||
return '<div class="file-tree-item' + (isSelected ? ' selected' : '') + '" ' +
|
// Get freshness data for this file
|
||||||
|
var fd = freshnessData[file.path];
|
||||||
|
var freshnessClass = '';
|
||||||
|
var freshnessBadge = '';
|
||||||
|
|
||||||
|
if (fd) {
|
||||||
|
if (fd.freshness >= 75) {
|
||||||
|
freshnessClass = ' freshness-good';
|
||||||
|
freshnessBadge = '<span class="freshness-badge good">' + fd.freshness + '%</span>';
|
||||||
|
} else if (fd.freshness >= 50) {
|
||||||
|
freshnessClass = ' freshness-warn';
|
||||||
|
freshnessBadge = '<span class="freshness-badge warn">' + fd.freshness + '%</span>';
|
||||||
|
} else {
|
||||||
|
freshnessClass = ' freshness-stale';
|
||||||
|
freshnessBadge = '<span class="freshness-badge stale">' + fd.freshness + '%</span>';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return '<div class="file-tree-item' + freshnessClass + (isSelected ? ' selected' : '') + '" ' +
|
||||||
'onclick="selectClaudeFile(\'' + safeId + '\')" ' +
|
'onclick="selectClaudeFile(\'' + safeId + '\')" ' +
|
||||||
'style="padding-left: ' + indentPx + 'rem;">' +
|
'style="padding-left: ' + indentPx + 'rem;">' +
|
||||||
'<i data-lucide="file-text" class="w-4 h-4"></i>' +
|
'<i data-lucide="file-text" class="w-4 h-4"></i>' +
|
||||||
'<span class="file-name">' + escapeHtml(file.name) + '</span>' +
|
'<span class="file-name">' + escapeHtml(file.name) + '</span>' +
|
||||||
|
freshnessBadge +
|
||||||
(file.parentDirectory ? '<span class="file-path-hint">' + escapeHtml(file.parentDirectory) + '</span>' : '') +
|
(file.parentDirectory ? '<span class="file-path-hint">' + escapeHtml(file.parentDirectory) + '</span>' : '') +
|
||||||
'</div>';
|
'</div>';
|
||||||
}
|
}
|
||||||
@@ -446,6 +518,38 @@ function renderFileMetadata() {
|
|||||||
'</div>';
|
'</div>';
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Freshness section
|
||||||
|
var fd = freshnessData[selectedFile.path];
|
||||||
|
if (fd) {
|
||||||
|
var freshnessBarClass = fd.freshness >= 75 ? 'good' : fd.freshness >= 50 ? 'warn' : 'stale';
|
||||||
|
html += '<div class="metadata-section freshness-section">' +
|
||||||
|
'<h4><i data-lucide="activity" class="w-4 h-4"></i> ' + (t('claudeManager.freshness') || 'Freshness') + '</h4>' +
|
||||||
|
'<div class="freshness-gauge">' +
|
||||||
|
'<div class="freshness-bar ' + freshnessBarClass + '" style="width: ' + fd.freshness + '%"></div>' +
|
||||||
|
'</div>' +
|
||||||
|
'<div class="freshness-value-display">' + fd.freshness + '%</div>' +
|
||||||
|
'<div class="metadata-item">' +
|
||||||
|
'<span class="label">' + (t('claudeManager.lastContentUpdate') || 'Last Content Update') + '</span>' +
|
||||||
|
'<span class="value">' + (fd.lastUpdated ? formatDate(fd.lastUpdated) : (t('claudeManager.never') || 'Never tracked')) + '</span>' +
|
||||||
|
'</div>' +
|
||||||
|
'<div class="metadata-item">' +
|
||||||
|
'<span class="label">' + (t('claudeManager.changedFiles') || 'Changed Files') + '</span>' +
|
||||||
|
'<span class="value">' + fd.changedFilesCount + ' ' + (t('claudeManager.filesSinceUpdate') || 'files since update') + '</span>' +
|
||||||
|
'</div>';
|
||||||
|
|
||||||
|
if (fd.needsUpdate) {
|
||||||
|
html += '<div class="update-reminder">' +
|
||||||
|
'<i data-lucide="alert-triangle" class="w-4 h-4"></i>' +
|
||||||
|
'<span>' + (t('claudeManager.updateReminder') || 'This file may need updating') + '</span>' +
|
||||||
|
'</div>';
|
||||||
|
}
|
||||||
|
|
||||||
|
html += '<button class="btn btn-sm btn-secondary full-width" onclick="markFileAsUpdated()">' +
|
||||||
|
'<i data-lucide="check-circle" class="w-4 h-4"></i> ' + (t('claudeManager.markAsUpdated') || 'Mark as Updated') +
|
||||||
|
'</button>' +
|
||||||
|
'</div>';
|
||||||
|
}
|
||||||
|
|
||||||
html += '<div class="metadata-section">' +
|
html += '<div class="metadata-section">' +
|
||||||
'<h4>' + t('claudeManager.actions') + '</h4>';
|
'<h4>' + t('claudeManager.actions') + '</h4>';
|
||||||
|
|
||||||
@@ -536,10 +640,12 @@ async function syncFileWithCLI() {
|
|||||||
var result = await response.json();
|
var result = await response.json();
|
||||||
|
|
||||||
if (result.success) {
|
if (result.success) {
|
||||||
// Reload file content
|
// Reload file content and freshness data
|
||||||
var fileData = await loadFileContent(selectedFile.path);
|
var fileData = await loadFileContent(selectedFile.path);
|
||||||
if (fileData) {
|
if (fileData) {
|
||||||
selectedFile = fileData;
|
selectedFile = fileData;
|
||||||
|
await loadFreshnessData();
|
||||||
|
renderFileTree();
|
||||||
renderFileViewer();
|
renderFileViewer();
|
||||||
renderFileMetadata();
|
renderFileMetadata();
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -51,6 +51,7 @@ const ParamsSchema = z.object({
|
|||||||
format: z.enum(['json', 'text', 'pretty']).default('json'),
|
format: z.enum(['json', 'text', 'pretty']).default('json'),
|
||||||
languages: z.array(z.string()).optional(),
|
languages: z.array(z.string()).optional(),
|
||||||
limit: z.number().default(20),
|
limit: z.number().default(20),
|
||||||
|
enrich: z.boolean().default(false),
|
||||||
// Additional fields for internal functions
|
// Additional fields for internal functions
|
||||||
file: z.string().optional(),
|
file: z.string().optional(),
|
||||||
key: z.string().optional(),
|
key: z.string().optional(),
|
||||||
@@ -516,7 +517,7 @@ async function initIndex(params: Params): Promise<ExecuteResult> {
|
|||||||
* @returns Execution result
|
* @returns Execution result
|
||||||
*/
|
*/
|
||||||
async function searchCode(params: Params): Promise<ExecuteResult> {
|
async function searchCode(params: Params): Promise<ExecuteResult> {
|
||||||
const { query, path = '.', limit = 20, mode = 'auto' } = params;
|
const { query, path = '.', limit = 20, mode = 'auto', enrich = false } = params;
|
||||||
|
|
||||||
if (!query) {
|
if (!query) {
|
||||||
return { success: false, error: 'Query is required for search action' };
|
return { success: false, error: 'Query is required for search action' };
|
||||||
@@ -537,6 +538,10 @@ async function searchCode(params: Params): Promise<ExecuteResult> {
|
|||||||
const cliMode = modeMap[mode] || 'auto';
|
const cliMode = modeMap[mode] || 'auto';
|
||||||
const args = ['search', query, '--limit', limit.toString(), '--mode', cliMode, '--json'];
|
const args = ['search', query, '--limit', limit.toString(), '--mode', cliMode, '--json'];
|
||||||
|
|
||||||
|
if (enrich) {
|
||||||
|
args.push('--enrich');
|
||||||
|
}
|
||||||
|
|
||||||
const result = await executeCodexLens(args, { cwd: path });
|
const result = await executeCodexLens(args, { cwd: path });
|
||||||
|
|
||||||
if (result.success && result.output) {
|
if (result.success && result.output) {
|
||||||
@@ -557,7 +562,7 @@ async function searchCode(params: Params): Promise<ExecuteResult> {
|
|||||||
* @returns Execution result
|
* @returns Execution result
|
||||||
*/
|
*/
|
||||||
async function searchFiles(params: Params): Promise<ExecuteResult> {
|
async function searchFiles(params: Params): Promise<ExecuteResult> {
|
||||||
const { query, path = '.', limit = 20, mode = 'auto' } = params;
|
const { query, path = '.', limit = 20, mode = 'auto', enrich = false } = params;
|
||||||
|
|
||||||
if (!query) {
|
if (!query) {
|
||||||
return { success: false, error: 'Query is required for search_files action' };
|
return { success: false, error: 'Query is required for search_files action' };
|
||||||
@@ -578,6 +583,10 @@ async function searchFiles(params: Params): Promise<ExecuteResult> {
|
|||||||
const cliMode = modeMap[mode] || 'auto';
|
const cliMode = modeMap[mode] || 'auto';
|
||||||
const args = ['search', query, '--files-only', '--limit', limit.toString(), '--mode', cliMode, '--json'];
|
const args = ['search', query, '--files-only', '--limit', limit.toString(), '--mode', cliMode, '--json'];
|
||||||
|
|
||||||
|
if (enrich) {
|
||||||
|
args.push('--enrich');
|
||||||
|
}
|
||||||
|
|
||||||
const result = await executeCodexLens(args, { cwd: path });
|
const result = await executeCodexLens(args, { cwd: path });
|
||||||
|
|
||||||
if (result.success && result.output) {
|
if (result.success && result.output) {
|
||||||
@@ -764,6 +773,9 @@ Usage:
|
|||||||
codex_lens(action="search", query="func", mode="hybrid") # Force hybrid search
|
codex_lens(action="search", query="func", mode="hybrid") # Force hybrid search
|
||||||
codex_lens(action="search_files", query="x") # Search, return paths only
|
codex_lens(action="search_files", query="x") # Search, return paths only
|
||||||
|
|
||||||
|
Graph Enrichment:
|
||||||
|
codex_lens(action="search", query="func", enrich=true) # Enrich results with code relationships
|
||||||
|
|
||||||
Search Modes:
|
Search Modes:
|
||||||
- auto: Auto-detect (hybrid if embeddings exist, exact otherwise) [default]
|
- auto: Auto-detect (hybrid if embeddings exist, exact otherwise) [default]
|
||||||
- exact/text: Exact FTS for code identifiers
|
- exact/text: Exact FTS for code identifiers
|
||||||
@@ -820,6 +832,11 @@ Note: For advanced operations (config, status, clean), use CLI directly: codexle
|
|||||||
description: 'Maximum number of search results (for search and search_files actions)',
|
description: 'Maximum number of search results (for search and search_files actions)',
|
||||||
default: 20,
|
default: 20,
|
||||||
},
|
},
|
||||||
|
enrich: {
|
||||||
|
type: 'boolean',
|
||||||
|
description: 'Enrich search results with code graph relationships (calls, imports)',
|
||||||
|
default: false,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
required: ['action'],
|
required: ['action'],
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -36,6 +36,7 @@ const ParamsSchema = z.object({
|
|||||||
includeHidden: z.boolean().default(false),
|
includeHidden: z.boolean().default(false),
|
||||||
languages: z.array(z.string()).optional(),
|
languages: z.array(z.string()).optional(),
|
||||||
limit: z.number().default(10),
|
limit: z.number().default(10),
|
||||||
|
enrich: z.boolean().default(false),
|
||||||
});
|
});
|
||||||
|
|
||||||
type Params = z.infer<typeof ParamsSchema>;
|
type Params = z.infer<typeof ParamsSchema>;
|
||||||
@@ -59,11 +60,21 @@ interface ExactMatch {
|
|||||||
content: string;
|
content: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
interface RelationshipInfo {
|
||||||
|
type: string; // 'calls', 'imports', 'called_by', 'imported_by'
|
||||||
|
direction: 'outgoing' | 'incoming';
|
||||||
|
target?: string; // Target symbol name (for outgoing)
|
||||||
|
source?: string; // Source symbol name (for incoming)
|
||||||
|
file: string; // File path
|
||||||
|
line?: number; // Line number
|
||||||
|
}
|
||||||
|
|
||||||
interface SemanticMatch {
|
interface SemanticMatch {
|
||||||
file: string;
|
file: string;
|
||||||
score: number;
|
score: number;
|
||||||
content: string;
|
content: string;
|
||||||
symbol: string | null;
|
symbol: string | null;
|
||||||
|
relationships?: RelationshipInfo[];
|
||||||
}
|
}
|
||||||
|
|
||||||
interface GraphMatch {
|
interface GraphMatch {
|
||||||
@@ -635,7 +646,7 @@ async function executeRipgrepMode(params: Params): Promise<SearchResult> {
|
|||||||
* Requires index
|
* Requires index
|
||||||
*/
|
*/
|
||||||
async function executeCodexLensExactMode(params: Params): Promise<SearchResult> {
|
async function executeCodexLensExactMode(params: Params): Promise<SearchResult> {
|
||||||
const { query, path = '.', maxResults = 10 } = params;
|
const { query, path = '.', maxResults = 10, enrich = false } = params;
|
||||||
|
|
||||||
if (!query) {
|
if (!query) {
|
||||||
return {
|
return {
|
||||||
@@ -657,6 +668,9 @@ async function executeCodexLensExactMode(params: Params): Promise<SearchResult>
|
|||||||
const indexStatus = await checkIndexStatus(path);
|
const indexStatus = await checkIndexStatus(path);
|
||||||
|
|
||||||
const args = ['search', query, '--limit', maxResults.toString(), '--mode', 'exact', '--json'];
|
const args = ['search', query, '--limit', maxResults.toString(), '--mode', 'exact', '--json'];
|
||||||
|
if (enrich) {
|
||||||
|
args.push('--enrich');
|
||||||
|
}
|
||||||
const result = await executeCodexLens(args, { cwd: path });
|
const result = await executeCodexLens(args, { cwd: path });
|
||||||
|
|
||||||
if (!result.success) {
|
if (!result.success) {
|
||||||
@@ -707,7 +721,7 @@ async function executeCodexLensExactMode(params: Params): Promise<SearchResult>
|
|||||||
* Requires index with embeddings
|
* Requires index with embeddings
|
||||||
*/
|
*/
|
||||||
async function executeHybridMode(params: Params): Promise<SearchResult> {
|
async function executeHybridMode(params: Params): Promise<SearchResult> {
|
||||||
const { query, path = '.', maxResults = 10 } = params;
|
const { query, path = '.', maxResults = 10, enrich = false } = params;
|
||||||
|
|
||||||
if (!query) {
|
if (!query) {
|
||||||
return {
|
return {
|
||||||
@@ -729,6 +743,9 @@ async function executeHybridMode(params: Params): Promise<SearchResult> {
|
|||||||
const indexStatus = await checkIndexStatus(path);
|
const indexStatus = await checkIndexStatus(path);
|
||||||
|
|
||||||
const args = ['search', query, '--limit', maxResults.toString(), '--mode', 'hybrid', '--json'];
|
const args = ['search', query, '--limit', maxResults.toString(), '--mode', 'hybrid', '--json'];
|
||||||
|
if (enrich) {
|
||||||
|
args.push('--enrich');
|
||||||
|
}
|
||||||
const result = await executeCodexLens(args, { cwd: path });
|
const result = await executeCodexLens(args, { cwd: path });
|
||||||
|
|
||||||
if (!result.success) {
|
if (!result.success) {
|
||||||
@@ -958,6 +975,9 @@ export const schema: ToolSchema = {
|
|||||||
smart_search(action="init") # Create FTS index for current directory
|
smart_search(action="init") # Create FTS index for current directory
|
||||||
smart_search(action="status") # Check index and embedding status
|
smart_search(action="status") # Check index and embedding status
|
||||||
|
|
||||||
|
**Graph Enrichment:**
|
||||||
|
smart_search(query="func", enrich=true) # Enrich results with code relationships (calls, imports, called_by, imported_by)
|
||||||
|
|
||||||
**Modes:** auto (intelligent routing), hybrid (semantic, needs index), exact (FTS), ripgrep (fast, no index), priority (fallback: hybrid→exact→ripgrep)`,
|
**Modes:** auto (intelligent routing), hybrid (semantic, needs index), exact (FTS), ripgrep (fast, no index), priority (fallback: hybrid→exact→ripgrep)`,
|
||||||
inputSchema: {
|
inputSchema: {
|
||||||
type: 'object',
|
type: 'object',
|
||||||
@@ -1021,6 +1041,11 @@ export const schema: ToolSchema = {
|
|||||||
items: { type: 'string' },
|
items: { type: 'string' },
|
||||||
description: 'Languages to index (for init action). Example: ["javascript", "typescript"]',
|
description: 'Languages to index (for init action). Example: ["javascript", "typescript"]',
|
||||||
},
|
},
|
||||||
|
enrich: {
|
||||||
|
type: 'boolean',
|
||||||
|
description: 'Enrich search results with code graph relationships (calls, imports, called_by, imported_by).',
|
||||||
|
default: false,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
required: [],
|
required: [],
|
||||||
},
|
},
|
||||||
|
|||||||
257
ccw/tests/smart-search-enrich.test.js
Normal file
257
ccw/tests/smart-search-enrich.test.js
Normal file
@@ -0,0 +1,257 @@
|
|||||||
|
/**
|
||||||
|
* Tests for smart_search with enrich parameter
|
||||||
|
*
|
||||||
|
* Tests the following:
|
||||||
|
* - enrich parameter is passed to codex-lens
|
||||||
|
* - relationship data is parsed from response
|
||||||
|
* - SemanticMatch interface with relationships field
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { describe, it, before, mock } from 'node:test';
|
||||||
|
import assert from 'node:assert';
|
||||||
|
import { dirname, join } from 'path';
|
||||||
|
import { fileURLToPath } from 'url';
|
||||||
|
|
||||||
|
const __filename = fileURLToPath(import.meta.url);
|
||||||
|
const __dirname = dirname(__filename);
|
||||||
|
|
||||||
|
// Import the smart-search module (exports schema, not smartSearchTool)
|
||||||
|
const smartSearchPath = new URL('../dist/tools/smart-search.js', import.meta.url).href;
|
||||||
|
|
||||||
|
describe('Smart Search Enrich Parameter', async () => {
|
||||||
|
let smartSearchModule;
|
||||||
|
|
||||||
|
before(async () => {
|
||||||
|
try {
|
||||||
|
smartSearchModule = await import(smartSearchPath);
|
||||||
|
} catch (err) {
|
||||||
|
console.log('Note: smart-search module import skipped:', err.message);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Parameter Schema', () => {
|
||||||
|
it('should have enrich parameter in schema', async () => {
|
||||||
|
if (!smartSearchModule) {
|
||||||
|
console.log('Skipping: smart-search module not available');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const { schema } = smartSearchModule;
|
||||||
|
assert.ok(schema, 'Should export schema');
|
||||||
|
// Schema uses inputSchema (MCP standard), not parameters
|
||||||
|
const params = schema.inputSchema || schema.parameters;
|
||||||
|
assert.ok(params, 'Should have inputSchema or parameters');
|
||||||
|
|
||||||
|
const props = params.properties;
|
||||||
|
assert.ok(props.enrich, 'Should have enrich parameter');
|
||||||
|
assert.strictEqual(props.enrich.type, 'boolean', 'enrich should be boolean');
|
||||||
|
assert.strictEqual(props.enrich.default, false, 'enrich should default to false');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should describe enrich parameter purpose', async () => {
|
||||||
|
if (!smartSearchModule) {
|
||||||
|
console.log('Skipping: smart-search module not available');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const { schema } = smartSearchModule;
|
||||||
|
const params = schema.inputSchema || schema.parameters;
|
||||||
|
const enrichDesc = params.properties.enrich?.description || '';
|
||||||
|
|
||||||
|
// Description should mention relationships or graph
|
||||||
|
const mentionsRelationships = enrichDesc.toLowerCase().includes('relationship') ||
|
||||||
|
enrichDesc.toLowerCase().includes('graph') ||
|
||||||
|
enrichDesc.toLowerCase().includes('enrich');
|
||||||
|
assert.ok(mentionsRelationships, 'enrich description should mention relationships/graph');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('SemanticMatch Interface', () => {
|
||||||
|
it('should handle results with relationships field', async () => {
|
||||||
|
if (!smartSearchModule) {
|
||||||
|
console.log('Skipping: smart-search module not available');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a mock result with relationships
|
||||||
|
const mockResult = {
|
||||||
|
file: 'test.py',
|
||||||
|
score: 0.95,
|
||||||
|
content: 'def main(): pass',
|
||||||
|
symbol: 'main',
|
||||||
|
relationships: [
|
||||||
|
{
|
||||||
|
type: 'calls',
|
||||||
|
direction: 'outgoing',
|
||||||
|
target: 'helper',
|
||||||
|
file: 'test.py',
|
||||||
|
line: 5
|
||||||
|
},
|
||||||
|
{
|
||||||
|
type: 'called_by',
|
||||||
|
direction: 'incoming',
|
||||||
|
source: 'entrypoint',
|
||||||
|
file: 'app.py',
|
||||||
|
line: 10
|
||||||
|
}
|
||||||
|
]
|
||||||
|
};
|
||||||
|
|
||||||
|
// Verify structure
|
||||||
|
assert.ok(Array.isArray(mockResult.relationships), 'relationships should be array');
|
||||||
|
assert.strictEqual(mockResult.relationships.length, 2, 'should have 2 relationships');
|
||||||
|
|
||||||
|
const outgoing = mockResult.relationships[0];
|
||||||
|
assert.strictEqual(outgoing.type, 'calls');
|
||||||
|
assert.strictEqual(outgoing.direction, 'outgoing');
|
||||||
|
assert.ok(outgoing.target, 'outgoing should have target');
|
||||||
|
|
||||||
|
const incoming = mockResult.relationships[1];
|
||||||
|
assert.strictEqual(incoming.type, 'called_by');
|
||||||
|
assert.strictEqual(incoming.direction, 'incoming');
|
||||||
|
assert.ok(incoming.source, 'incoming should have source');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('RelationshipInfo Structure', () => {
|
||||||
|
it('should validate relationship info structure', () => {
|
||||||
|
// Test the expected structure of RelationshipInfo
|
||||||
|
const validRelationship = {
|
||||||
|
type: 'calls',
|
||||||
|
direction: 'outgoing',
|
||||||
|
target: 'some_function',
|
||||||
|
file: 'module.py',
|
||||||
|
line: 42
|
||||||
|
};
|
||||||
|
|
||||||
|
assert.ok(['calls', 'imports', 'extends', 'called_by', 'imported_by', 'extended_by']
|
||||||
|
.includes(validRelationship.type), 'type should be valid relationship type');
|
||||||
|
assert.ok(['outgoing', 'incoming'].includes(validRelationship.direction),
|
||||||
|
'direction should be outgoing or incoming');
|
||||||
|
assert.ok(typeof validRelationship.file === 'string', 'file should be string');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should allow optional line number', () => {
|
||||||
|
const withLine = {
|
||||||
|
type: 'calls',
|
||||||
|
direction: 'outgoing',
|
||||||
|
target: 'func',
|
||||||
|
file: 'test.py',
|
||||||
|
line: 10
|
||||||
|
};
|
||||||
|
|
||||||
|
const withoutLine = {
|
||||||
|
type: 'imports',
|
||||||
|
direction: 'outgoing',
|
||||||
|
target: 'os',
|
||||||
|
file: 'test.py'
|
||||||
|
// line is optional
|
||||||
|
};
|
||||||
|
|
||||||
|
assert.strictEqual(withLine.line, 10);
|
||||||
|
assert.strictEqual(withoutLine.line, undefined);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Smart Search Tool Definition', async () => {
|
||||||
|
let smartSearchModule;
|
||||||
|
|
||||||
|
before(async () => {
|
||||||
|
try {
|
||||||
|
smartSearchModule = await import(smartSearchPath);
|
||||||
|
} catch (err) {
|
||||||
|
console.log('Note: smart-search module not available');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should have correct tool name', () => {
|
||||||
|
if (!smartSearchModule) {
|
||||||
|
console.log('Skipping: smart-search module not available');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
assert.strictEqual(smartSearchModule.schema.name, 'smart_search');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should have all required parameters', () => {
|
||||||
|
if (!smartSearchModule) {
|
||||||
|
console.log('Skipping: smart-search module not available');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const params = smartSearchModule.schema.inputSchema || smartSearchModule.schema.parameters;
|
||||||
|
const props = params.properties;
|
||||||
|
|
||||||
|
// Core parameters
|
||||||
|
assert.ok(props.action, 'Should have action parameter');
|
||||||
|
assert.ok(props.query, 'Should have query parameter');
|
||||||
|
assert.ok(props.path, 'Should have path parameter');
|
||||||
|
|
||||||
|
// Search parameters
|
||||||
|
assert.ok(props.mode, 'Should have mode parameter');
|
||||||
|
assert.ok(props.maxResults || props.limit, 'Should have maxResults/limit parameter');
|
||||||
|
|
||||||
|
// New enrich parameter
|
||||||
|
assert.ok(props.enrich, 'Should have enrich parameter');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should support search modes', () => {
|
||||||
|
if (!smartSearchModule) {
|
||||||
|
console.log('Skipping: smart-search module not available');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const params = smartSearchModule.schema.inputSchema || smartSearchModule.schema.parameters;
|
||||||
|
const modeEnum = params.properties.mode?.enum;
|
||||||
|
|
||||||
|
assert.ok(modeEnum, 'Should have mode enum');
|
||||||
|
assert.ok(modeEnum.includes('auto'), 'Should support auto mode');
|
||||||
|
assert.ok(modeEnum.includes('hybrid'), 'Should support hybrid mode');
|
||||||
|
assert.ok(modeEnum.includes('exact'), 'Should support exact mode');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
describe('Enrich Flag Integration', async () => {
|
||||||
|
let codexLensModule;
|
||||||
|
|
||||||
|
before(async () => {
|
||||||
|
try {
|
||||||
|
const codexLensPath = new URL('../dist/tools/codex-lens.js', import.meta.url).href;
|
||||||
|
codexLensModule = await import(codexLensPath);
|
||||||
|
} catch (err) {
|
||||||
|
console.log('Note: codex-lens module not available');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('codex-lens should support enrich parameter', () => {
|
||||||
|
if (!codexLensModule) {
|
||||||
|
console.log('Skipping: codex-lens module not available');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use schema export (primary) or codexLensTool (backward-compatible)
|
||||||
|
const toolDef = codexLensModule.schema || codexLensModule.codexLensTool;
|
||||||
|
assert.ok(toolDef, 'Should have schema or codexLensTool export');
|
||||||
|
|
||||||
|
// Schema uses inputSchema (MCP standard), codexLensTool uses parameters
|
||||||
|
const params = toolDef.inputSchema || toolDef.parameters;
|
||||||
|
const props = params.properties;
|
||||||
|
assert.ok(props.enrich, 'should have enrich parameter');
|
||||||
|
assert.strictEqual(props.enrich.type, 'boolean', 'enrich should be boolean');
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should pass enrich flag to command line', async () => {
|
||||||
|
if (!codexLensModule) {
|
||||||
|
console.log('Skipping: codex-lens module not available');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if executeCodexLens function is exported
|
||||||
|
const { executeCodexLens } = codexLensModule;
|
||||||
|
if (executeCodexLens) {
|
||||||
|
// The function should be available for passing enrich parameter
|
||||||
|
assert.ok(typeof executeCodexLens === 'function', 'executeCodexLens should be a function');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
@@ -268,6 +268,7 @@ def search(
|
|||||||
files_only: bool = typer.Option(False, "--files-only", "-f", help="Return only file paths without content snippets."),
|
files_only: bool = typer.Option(False, "--files-only", "-f", help="Return only file paths without content snippets."),
|
||||||
mode: str = typer.Option("auto", "--mode", "-m", help="Search mode: auto, exact, fuzzy, hybrid, vector, pure-vector."),
|
mode: str = typer.Option("auto", "--mode", "-m", help="Search mode: auto, exact, fuzzy, hybrid, vector, pure-vector."),
|
||||||
weights: Optional[str] = typer.Option(None, "--weights", help="Custom RRF weights as 'exact,fuzzy,vector' (e.g., '0.5,0.3,0.2')."),
|
weights: Optional[str] = typer.Option(None, "--weights", help="Custom RRF weights as 'exact,fuzzy,vector' (e.g., '0.5,0.3,0.2')."),
|
||||||
|
enrich: bool = typer.Option(False, "--enrich", help="Enrich results with code graph relationships (calls, imports)."),
|
||||||
json_mode: bool = typer.Option(False, "--json", help="Output JSON response."),
|
json_mode: bool = typer.Option(False, "--json", help="Output JSON response."),
|
||||||
verbose: bool = typer.Option(False, "--verbose", "-v", help="Enable debug logging."),
|
verbose: bool = typer.Option(False, "--verbose", "-v", help="Enable debug logging."),
|
||||||
) -> None:
|
) -> None:
|
||||||
@@ -411,19 +412,42 @@ def search(
|
|||||||
console.print(fp)
|
console.print(fp)
|
||||||
else:
|
else:
|
||||||
result = engine.search(query, search_path, options)
|
result = engine.search(query, search_path, options)
|
||||||
|
results_list = [
|
||||||
|
{
|
||||||
|
"path": r.path,
|
||||||
|
"score": r.score,
|
||||||
|
"excerpt": r.excerpt,
|
||||||
|
"source": getattr(r, "search_source", None),
|
||||||
|
"symbol": getattr(r, "symbol", None),
|
||||||
|
}
|
||||||
|
for r in result.results
|
||||||
|
]
|
||||||
|
|
||||||
|
# Enrich results with relationship data if requested
|
||||||
|
enriched = False
|
||||||
|
if enrich:
|
||||||
|
try:
|
||||||
|
from codexlens.search.enrichment import RelationshipEnricher
|
||||||
|
|
||||||
|
# Find index path for the search path
|
||||||
|
project_record = registry.find_by_source_path(str(search_path))
|
||||||
|
if project_record:
|
||||||
|
index_path = Path(project_record["index_root"]) / "_index.db"
|
||||||
|
if index_path.exists():
|
||||||
|
with RelationshipEnricher(index_path) as enricher:
|
||||||
|
results_list = enricher.enrich(results_list, limit=limit)
|
||||||
|
enriched = True
|
||||||
|
except Exception as e:
|
||||||
|
# Enrichment failure should not break search
|
||||||
|
if verbose:
|
||||||
|
console.print(f"[yellow]Warning: Enrichment failed: {e}[/yellow]")
|
||||||
|
|
||||||
payload = {
|
payload = {
|
||||||
"query": query,
|
"query": query,
|
||||||
"mode": actual_mode,
|
"mode": actual_mode,
|
||||||
"count": len(result.results),
|
"count": len(results_list),
|
||||||
"results": [
|
"enriched": enriched,
|
||||||
{
|
"results": results_list,
|
||||||
"path": r.path,
|
|
||||||
"score": r.score,
|
|
||||||
"excerpt": r.excerpt,
|
|
||||||
"source": getattr(r, "search_source", None),
|
|
||||||
}
|
|
||||||
for r in result.results
|
|
||||||
],
|
|
||||||
"stats": {
|
"stats": {
|
||||||
"dirs_searched": result.stats.dirs_searched,
|
"dirs_searched": result.stats.dirs_searched,
|
||||||
"files_matched": result.stats.files_matched,
|
"files_matched": result.stats.files_matched,
|
||||||
@@ -434,7 +458,8 @@ def search(
|
|||||||
print_json(success=True, result=payload)
|
print_json(success=True, result=payload)
|
||||||
else:
|
else:
|
||||||
render_search_results(result.results, verbose=verbose)
|
render_search_results(result.results, verbose=verbose)
|
||||||
console.print(f"[dim]Mode: {actual_mode} | Searched {result.stats.dirs_searched} directories in {result.stats.time_ms:.1f}ms[/dim]")
|
enrich_status = " | [green]Enriched[/green]" if enriched else ""
|
||||||
|
console.print(f"[dim]Mode: {actual_mode} | Searched {result.stats.dirs_searched} directories in {result.stats.time_ms:.1f}ms{enrich_status}[/dim]")
|
||||||
|
|
||||||
except SearchError as exc:
|
except SearchError as exc:
|
||||||
if json_mode:
|
if json_mode:
|
||||||
|
|||||||
77
codex-lens/src/codexlens/indexing/README.md
Normal file
77
codex-lens/src/codexlens/indexing/README.md
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
# Symbol Extraction and Indexing
|
||||||
|
|
||||||
|
This module provides symbol extraction and relationship tracking for code graph enrichment.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The `SymbolExtractor` class extracts code symbols (functions, classes) and their relationships (calls, imports) from source files using regex-based pattern matching.
|
||||||
|
|
||||||
|
## Supported Languages
|
||||||
|
|
||||||
|
- Python (.py)
|
||||||
|
- TypeScript (.ts, .tsx)
|
||||||
|
- JavaScript (.js, .jsx)
|
||||||
|
|
||||||
|
## Database Schema
|
||||||
|
|
||||||
|
### Symbols Table
|
||||||
|
Stores code symbols with their location information:
|
||||||
|
- `id`: Primary key
|
||||||
|
- `qualified_name`: Fully qualified name (e.g., "module.ClassName")
|
||||||
|
- `name`: Symbol name
|
||||||
|
- `kind`: Symbol type (function, class)
|
||||||
|
- `file_path`: Path to source file
|
||||||
|
- `start_line`: Starting line number
|
||||||
|
- `end_line`: Ending line number
|
||||||
|
|
||||||
|
### Symbol Relationships Table
|
||||||
|
Stores relationships between symbols:
|
||||||
|
- `id`: Primary key
|
||||||
|
- `source_symbol_id`: Foreign key to symbols table
|
||||||
|
- `target_symbol_fqn`: Fully qualified name of target symbol
|
||||||
|
- `relationship_type`: Type of relationship (calls, imports)
|
||||||
|
- `file_path`: Path to source file
|
||||||
|
- `line`: Line number where relationship occurs
|
||||||
|
|
||||||
|
## Usage Example
|
||||||
|
|
||||||
|
```python
|
||||||
|
from pathlib import Path
|
||||||
|
from codexlens.indexing.symbol_extractor import SymbolExtractor
|
||||||
|
|
||||||
|
# Initialize extractor
|
||||||
|
db_path = Path("./code_index.db")
|
||||||
|
extractor = SymbolExtractor(db_path)
|
||||||
|
extractor.connect()
|
||||||
|
|
||||||
|
# Extract from file
|
||||||
|
file_path = Path("src/my_module.py")
|
||||||
|
with open(file_path) as f:
|
||||||
|
content = f.read()
|
||||||
|
|
||||||
|
symbols, relationships = extractor.extract_from_file(file_path, content)
|
||||||
|
|
||||||
|
# Save to database
|
||||||
|
name_to_id = extractor.save_symbols(symbols)
|
||||||
|
extractor.save_relationships(relationships, name_to_id)
|
||||||
|
|
||||||
|
# Clean up
|
||||||
|
extractor.close()
|
||||||
|
```
|
||||||
|
|
||||||
|
## Pattern Matching
|
||||||
|
|
||||||
|
The extractor uses regex patterns to identify:
|
||||||
|
|
||||||
|
- **Functions**: Function definitions (including async, export keywords)
|
||||||
|
- **Classes**: Class definitions (including export keyword)
|
||||||
|
- **Imports**: Import/require statements
|
||||||
|
- **Calls**: Function/method invocations
|
||||||
|
|
||||||
|
## Future Enhancements
|
||||||
|
|
||||||
|
- Tree-sitter integration for more accurate parsing
|
||||||
|
- Support for additional languages
|
||||||
|
- Method and variable extraction
|
||||||
|
- Enhanced scope tracking
|
||||||
|
- Relationship type expansion (inherits, implements, etc.)
|
||||||
4
codex-lens/src/codexlens/indexing/__init__.py
Normal file
4
codex-lens/src/codexlens/indexing/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
"""Code indexing and symbol extraction."""
|
||||||
|
from codexlens.indexing.symbol_extractor import SymbolExtractor
|
||||||
|
|
||||||
|
__all__ = ["SymbolExtractor"]
|
||||||
234
codex-lens/src/codexlens/indexing/symbol_extractor.py
Normal file
234
codex-lens/src/codexlens/indexing/symbol_extractor.py
Normal file
@@ -0,0 +1,234 @@
|
|||||||
|
"""Symbol and relationship extraction from source code."""
|
||||||
|
import re
|
||||||
|
import sqlite3
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
|
||||||
|
class SymbolExtractor:
|
||||||
|
"""Extract symbols and relationships from source code using regex patterns."""
|
||||||
|
|
||||||
|
# Pattern definitions for different languages
|
||||||
|
PATTERNS = {
|
||||||
|
'python': {
|
||||||
|
'function': r'^(?:async\s+)?def\s+(\w+)\s*\(',
|
||||||
|
'class': r'^class\s+(\w+)\s*[:\(]',
|
||||||
|
'import': r'^(?:from\s+([\w.]+)\s+)?import\s+([\w.,\s]+)',
|
||||||
|
'call': r'(?<![.\w])(\w+)\s*\(',
|
||||||
|
},
|
||||||
|
'typescript': {
|
||||||
|
'function': r'(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*[<\(]',
|
||||||
|
'class': r'(?:export\s+)?class\s+(\w+)',
|
||||||
|
'import': r"import\s+.*\s+from\s+['\"]([^'\"]+)['\"]",
|
||||||
|
'call': r'(?<![.\w])(\w+)\s*[<\(]',
|
||||||
|
},
|
||||||
|
'javascript': {
|
||||||
|
'function': r'(?:export\s+)?(?:async\s+)?function\s+(\w+)\s*\(',
|
||||||
|
'class': r'(?:export\s+)?class\s+(\w+)',
|
||||||
|
'import': r"(?:import|require)\s*\(?['\"]([^'\"]+)['\"]",
|
||||||
|
'call': r'(?<![.\w])(\w+)\s*\(',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
LANGUAGE_MAP = {
|
||||||
|
'.py': 'python',
|
||||||
|
'.ts': 'typescript',
|
||||||
|
'.tsx': 'typescript',
|
||||||
|
'.js': 'javascript',
|
||||||
|
'.jsx': 'javascript',
|
||||||
|
}
|
||||||
|
|
||||||
|
def __init__(self, db_path: Path):
|
||||||
|
self.db_path = db_path
|
||||||
|
self.db_conn: Optional[sqlite3.Connection] = None
|
||||||
|
|
||||||
|
def connect(self) -> None:
|
||||||
|
"""Connect to database and ensure schema exists."""
|
||||||
|
self.db_conn = sqlite3.connect(str(self.db_path))
|
||||||
|
self._ensure_tables()
|
||||||
|
|
||||||
|
def _ensure_tables(self) -> None:
|
||||||
|
"""Create symbols and relationships tables if they don't exist."""
|
||||||
|
if not self.db_conn:
|
||||||
|
return
|
||||||
|
cursor = self.db_conn.cursor()
|
||||||
|
|
||||||
|
# Create symbols table with qualified_name
|
||||||
|
cursor.execute('''
|
||||||
|
CREATE TABLE IF NOT EXISTS symbols (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
qualified_name TEXT NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
kind TEXT NOT NULL,
|
||||||
|
file_path TEXT NOT NULL,
|
||||||
|
start_line INTEGER NOT NULL,
|
||||||
|
end_line INTEGER NOT NULL,
|
||||||
|
UNIQUE(file_path, name, start_line)
|
||||||
|
)
|
||||||
|
''')
|
||||||
|
|
||||||
|
# Create relationships table with target_symbol_fqn
|
||||||
|
cursor.execute('''
|
||||||
|
CREATE TABLE IF NOT EXISTS symbol_relationships (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
source_symbol_id INTEGER NOT NULL,
|
||||||
|
target_symbol_fqn TEXT NOT NULL,
|
||||||
|
relationship_type TEXT NOT NULL,
|
||||||
|
file_path TEXT NOT NULL,
|
||||||
|
line INTEGER,
|
||||||
|
FOREIGN KEY (source_symbol_id) REFERENCES symbols(id) ON DELETE CASCADE
|
||||||
|
)
|
||||||
|
''')
|
||||||
|
|
||||||
|
# Create performance indexes
|
||||||
|
cursor.execute('CREATE INDEX IF NOT EXISTS idx_symbols_name ON symbols(name)')
|
||||||
|
cursor.execute('CREATE INDEX IF NOT EXISTS idx_symbols_file ON symbols(file_path)')
|
||||||
|
cursor.execute('CREATE INDEX IF NOT EXISTS idx_rel_source ON symbol_relationships(source_symbol_id)')
|
||||||
|
cursor.execute('CREATE INDEX IF NOT EXISTS idx_rel_target ON symbol_relationships(target_symbol_fqn)')
|
||||||
|
cursor.execute('CREATE INDEX IF NOT EXISTS idx_rel_type ON symbol_relationships(relationship_type)')
|
||||||
|
|
||||||
|
self.db_conn.commit()
|
||||||
|
|
||||||
|
def extract_from_file(self, file_path: Path, content: str) -> Tuple[List[Dict], List[Dict]]:
|
||||||
|
"""Extract symbols and relationships from file content.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path: Path to the source file
|
||||||
|
content: File content as string
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple of (symbols, relationships) where:
|
||||||
|
- symbols: List of symbol dicts with qualified_name, name, kind, file_path, start_line, end_line
|
||||||
|
- relationships: List of relationship dicts with source_scope, target, type, file_path, line
|
||||||
|
"""
|
||||||
|
ext = file_path.suffix.lower()
|
||||||
|
lang = self.LANGUAGE_MAP.get(ext)
|
||||||
|
|
||||||
|
if not lang or lang not in self.PATTERNS:
|
||||||
|
return [], []
|
||||||
|
|
||||||
|
patterns = self.PATTERNS[lang]
|
||||||
|
symbols = []
|
||||||
|
relationships = []
|
||||||
|
lines = content.split('\n')
|
||||||
|
|
||||||
|
current_scope = None
|
||||||
|
|
||||||
|
for line_num, line in enumerate(lines, 1):
|
||||||
|
# Extract function/class definitions
|
||||||
|
for kind in ['function', 'class']:
|
||||||
|
if kind in patterns:
|
||||||
|
match = re.search(patterns[kind], line)
|
||||||
|
if match:
|
||||||
|
name = match.group(1)
|
||||||
|
qualified_name = f"{file_path.stem}.{name}"
|
||||||
|
symbols.append({
|
||||||
|
'qualified_name': qualified_name,
|
||||||
|
'name': name,
|
||||||
|
'kind': kind,
|
||||||
|
'file_path': str(file_path),
|
||||||
|
'start_line': line_num,
|
||||||
|
'end_line': line_num, # Simplified - would need proper parsing for actual end
|
||||||
|
})
|
||||||
|
current_scope = name
|
||||||
|
|
||||||
|
# Extract imports
|
||||||
|
if 'import' in patterns:
|
||||||
|
match = re.search(patterns['import'], line)
|
||||||
|
if match:
|
||||||
|
import_target = match.group(1) or match.group(2) if match.lastindex >= 2 else match.group(1)
|
||||||
|
if import_target and current_scope:
|
||||||
|
relationships.append({
|
||||||
|
'source_scope': current_scope,
|
||||||
|
'target': import_target.strip(),
|
||||||
|
'type': 'imports',
|
||||||
|
'file_path': str(file_path),
|
||||||
|
'line': line_num,
|
||||||
|
})
|
||||||
|
|
||||||
|
# Extract function calls (simplified)
|
||||||
|
if 'call' in patterns and current_scope:
|
||||||
|
for match in re.finditer(patterns['call'], line):
|
||||||
|
call_name = match.group(1)
|
||||||
|
# Skip common keywords and the current function
|
||||||
|
if call_name not in ['if', 'for', 'while', 'return', 'print', 'len', 'str', 'int', 'float', 'list', 'dict', 'set', 'tuple', current_scope]:
|
||||||
|
relationships.append({
|
||||||
|
'source_scope': current_scope,
|
||||||
|
'target': call_name,
|
||||||
|
'type': 'calls',
|
||||||
|
'file_path': str(file_path),
|
||||||
|
'line': line_num,
|
||||||
|
})
|
||||||
|
|
||||||
|
return symbols, relationships
|
||||||
|
|
||||||
|
def save_symbols(self, symbols: List[Dict]) -> Dict[str, int]:
|
||||||
|
"""Save symbols to database and return name->id mapping.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
symbols: List of symbol dicts with qualified_name, name, kind, file_path, start_line, end_line
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary mapping symbol name to database id
|
||||||
|
"""
|
||||||
|
if not self.db_conn or not symbols:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
cursor = self.db_conn.cursor()
|
||||||
|
name_to_id = {}
|
||||||
|
|
||||||
|
for sym in symbols:
|
||||||
|
try:
|
||||||
|
cursor.execute('''
|
||||||
|
INSERT OR IGNORE INTO symbols
|
||||||
|
(qualified_name, name, kind, file_path, start_line, end_line)
|
||||||
|
VALUES (?, ?, ?, ?, ?, ?)
|
||||||
|
''', (sym['qualified_name'], sym['name'], sym['kind'],
|
||||||
|
sym['file_path'], sym['start_line'], sym['end_line']))
|
||||||
|
|
||||||
|
# Get the id
|
||||||
|
cursor.execute('''
|
||||||
|
SELECT id FROM symbols
|
||||||
|
WHERE file_path = ? AND name = ? AND start_line = ?
|
||||||
|
''', (sym['file_path'], sym['name'], sym['start_line']))
|
||||||
|
|
||||||
|
row = cursor.fetchone()
|
||||||
|
if row:
|
||||||
|
name_to_id[sym['name']] = row[0]
|
||||||
|
except sqlite3.Error:
|
||||||
|
continue
|
||||||
|
|
||||||
|
self.db_conn.commit()
|
||||||
|
return name_to_id
|
||||||
|
|
||||||
|
def save_relationships(self, relationships: List[Dict], name_to_id: Dict[str, int]) -> None:
|
||||||
|
"""Save relationships to database.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
relationships: List of relationship dicts with source_scope, target, type, file_path, line
|
||||||
|
name_to_id: Dictionary mapping symbol names to database ids
|
||||||
|
"""
|
||||||
|
if not self.db_conn or not relationships:
|
||||||
|
return
|
||||||
|
|
||||||
|
cursor = self.db_conn.cursor()
|
||||||
|
|
||||||
|
for rel in relationships:
|
||||||
|
source_id = name_to_id.get(rel['source_scope'])
|
||||||
|
if source_id:
|
||||||
|
try:
|
||||||
|
cursor.execute('''
|
||||||
|
INSERT INTO symbol_relationships
|
||||||
|
(source_symbol_id, target_symbol_fqn, relationship_type, file_path, line)
|
||||||
|
VALUES (?, ?, ?, ?, ?)
|
||||||
|
''', (source_id, rel['target'], rel['type'], rel['file_path'], rel['line']))
|
||||||
|
except sqlite3.Error:
|
||||||
|
continue
|
||||||
|
|
||||||
|
self.db_conn.commit()
|
||||||
|
|
||||||
|
def close(self) -> None:
|
||||||
|
"""Close database connection."""
|
||||||
|
if self.db_conn:
|
||||||
|
self.db_conn.close()
|
||||||
|
self.db_conn = None
|
||||||
150
codex-lens/src/codexlens/search/enrichment.py
Normal file
150
codex-lens/src/codexlens/search/enrichment.py
Normal file
@@ -0,0 +1,150 @@
|
|||||||
|
# codex-lens/src/codexlens/search/enrichment.py
|
||||||
|
"""Relationship enrichment for search results."""
|
||||||
|
import sqlite3
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import List, Dict, Any, Optional
|
||||||
|
|
||||||
|
|
||||||
|
class RelationshipEnricher:
|
||||||
|
"""Enriches search results with code graph relationships."""
|
||||||
|
|
||||||
|
def __init__(self, index_path: Path):
|
||||||
|
"""Initialize with path to index database.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
index_path: Path to _index.db SQLite database
|
||||||
|
"""
|
||||||
|
self.index_path = index_path
|
||||||
|
self.db_conn: Optional[sqlite3.Connection] = None
|
||||||
|
self._connect()
|
||||||
|
|
||||||
|
def _connect(self) -> None:
|
||||||
|
"""Establish read-only database connection."""
|
||||||
|
if self.index_path.exists():
|
||||||
|
self.db_conn = sqlite3.connect(
|
||||||
|
f"file:{self.index_path}?mode=ro",
|
||||||
|
uri=True,
|
||||||
|
check_same_thread=False
|
||||||
|
)
|
||||||
|
self.db_conn.row_factory = sqlite3.Row
|
||||||
|
|
||||||
|
def enrich(self, results: List[Dict[str, Any]], limit: int = 10) -> List[Dict[str, Any]]:
|
||||||
|
"""Add relationship data to search results.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
results: List of search result dictionaries
|
||||||
|
limit: Maximum number of results to enrich
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Results with relationships field added
|
||||||
|
"""
|
||||||
|
if not self.db_conn:
|
||||||
|
return results
|
||||||
|
|
||||||
|
for result in results[:limit]:
|
||||||
|
file_path = result.get('file') or result.get('path')
|
||||||
|
symbol_name = result.get('symbol')
|
||||||
|
result['relationships'] = self._find_relationships(file_path, symbol_name)
|
||||||
|
return results
|
||||||
|
|
||||||
|
def _find_relationships(self, file_path: Optional[str], symbol_name: Optional[str]) -> List[Dict[str, Any]]:
|
||||||
|
"""Query relationships for a symbol.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path: Path to file containing the symbol
|
||||||
|
symbol_name: Name of the symbol
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of relationship dictionaries with type, direction, target/source, file, line
|
||||||
|
"""
|
||||||
|
if not self.db_conn or not symbol_name:
|
||||||
|
return []
|
||||||
|
|
||||||
|
relationships = []
|
||||||
|
cursor = self.db_conn.cursor()
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Find symbol ID(s) by name and optionally file
|
||||||
|
if file_path:
|
||||||
|
cursor.execute(
|
||||||
|
'SELECT id FROM symbols WHERE name = ? AND file_path = ?',
|
||||||
|
(symbol_name, file_path)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
cursor.execute('SELECT id FROM symbols WHERE name = ?', (symbol_name,))
|
||||||
|
|
||||||
|
symbol_ids = [row[0] for row in cursor.fetchall()]
|
||||||
|
|
||||||
|
if not symbol_ids:
|
||||||
|
return []
|
||||||
|
|
||||||
|
# Query outgoing relationships (symbol is source)
|
||||||
|
placeholders = ','.join('?' * len(symbol_ids))
|
||||||
|
cursor.execute(f'''
|
||||||
|
SELECT sr.relationship_type, sr.target_symbol_fqn, sr.file_path, sr.line
|
||||||
|
FROM symbol_relationships sr
|
||||||
|
WHERE sr.source_symbol_id IN ({placeholders})
|
||||||
|
''', symbol_ids)
|
||||||
|
|
||||||
|
for row in cursor.fetchall():
|
||||||
|
relationships.append({
|
||||||
|
'type': row[0],
|
||||||
|
'direction': 'outgoing',
|
||||||
|
'target': row[1],
|
||||||
|
'file': row[2],
|
||||||
|
'line': row[3],
|
||||||
|
})
|
||||||
|
|
||||||
|
# Query incoming relationships (symbol is target)
|
||||||
|
# Match against symbol name or qualified name patterns
|
||||||
|
cursor.execute('''
|
||||||
|
SELECT sr.relationship_type, s.name AS source_name, sr.file_path, sr.line
|
||||||
|
FROM symbol_relationships sr
|
||||||
|
JOIN symbols s ON sr.source_symbol_id = s.id
|
||||||
|
WHERE sr.target_symbol_fqn = ? OR sr.target_symbol_fqn LIKE ?
|
||||||
|
''', (symbol_name, f'%.{symbol_name}'))
|
||||||
|
|
||||||
|
for row in cursor.fetchall():
|
||||||
|
rel_type = row[0]
|
||||||
|
# Convert to incoming type
|
||||||
|
incoming_type = self._to_incoming_type(rel_type)
|
||||||
|
relationships.append({
|
||||||
|
'type': incoming_type,
|
||||||
|
'direction': 'incoming',
|
||||||
|
'source': row[1],
|
||||||
|
'file': row[2],
|
||||||
|
'line': row[3],
|
||||||
|
})
|
||||||
|
|
||||||
|
except sqlite3.Error:
|
||||||
|
return []
|
||||||
|
|
||||||
|
return relationships
|
||||||
|
|
||||||
|
def _to_incoming_type(self, outgoing_type: str) -> str:
|
||||||
|
"""Convert outgoing relationship type to incoming type.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
outgoing_type: The outgoing relationship type (e.g., 'calls', 'imports')
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Corresponding incoming type (e.g., 'called_by', 'imported_by')
|
||||||
|
"""
|
||||||
|
type_map = {
|
||||||
|
'calls': 'called_by',
|
||||||
|
'imports': 'imported_by',
|
||||||
|
'extends': 'extended_by',
|
||||||
|
}
|
||||||
|
return type_map.get(outgoing_type, f'{outgoing_type}_by')
|
||||||
|
|
||||||
|
def close(self) -> None:
|
||||||
|
"""Close database connection."""
|
||||||
|
if self.db_conn:
|
||||||
|
self.db_conn.close()
|
||||||
|
self.db_conn = None
|
||||||
|
|
||||||
|
def __enter__(self) -> 'RelationshipEnricher':
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __exit__(self, exc_type, exc_val, exc_tb) -> None:
|
||||||
|
self.close()
|
||||||
122
codex-lens/tests/test_cli_search.py
Normal file
122
codex-lens/tests/test_cli_search.py
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
"""Tests for CLI search command with --enrich flag."""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import pytest
|
||||||
|
from typer.testing import CliRunner
|
||||||
|
from codexlens.cli.commands import app
|
||||||
|
|
||||||
|
|
||||||
|
class TestCLISearchEnrich:
|
||||||
|
"""Test CLI search command with --enrich flag integration."""
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def runner(self):
|
||||||
|
"""Create CLI test runner."""
|
||||||
|
return CliRunner()
|
||||||
|
|
||||||
|
def test_search_with_enrich_flag_help(self, runner):
|
||||||
|
"""Test --enrich flag is documented in help."""
|
||||||
|
result = runner.invoke(app, ["search", "--help"])
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert "--enrich" in result.output
|
||||||
|
assert "relationships" in result.output.lower() or "graph" in result.output.lower()
|
||||||
|
|
||||||
|
def test_search_with_enrich_flag_accepted(self, runner):
|
||||||
|
"""Test --enrich flag is accepted by the CLI."""
|
||||||
|
result = runner.invoke(app, ["search", "test", "--enrich"])
|
||||||
|
# Should not show 'unknown option' error
|
||||||
|
assert "No such option" not in result.output
|
||||||
|
assert "error: unrecognized" not in result.output.lower()
|
||||||
|
|
||||||
|
def test_search_without_enrich_flag(self, runner):
|
||||||
|
"""Test search without --enrich flag has no relationships."""
|
||||||
|
result = runner.invoke(app, ["search", "test", "--json"])
|
||||||
|
# Even without an index, JSON should be attempted
|
||||||
|
if result.exit_code == 0:
|
||||||
|
try:
|
||||||
|
data = json.loads(result.output)
|
||||||
|
# If we get results, they should not have enriched=true
|
||||||
|
if data.get("success") and "result" in data:
|
||||||
|
assert data["result"].get("enriched", False) is False
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
pass # Not JSON output, that's fine for error cases
|
||||||
|
|
||||||
|
def test_search_enrich_json_output_structure(self, runner):
|
||||||
|
"""Test JSON output structure includes enriched flag."""
|
||||||
|
result = runner.invoke(app, ["search", "test", "--json", "--enrich"])
|
||||||
|
# If we get valid JSON output, check structure
|
||||||
|
if result.exit_code == 0:
|
||||||
|
try:
|
||||||
|
data = json.loads(result.output)
|
||||||
|
if data.get("success") and "result" in data:
|
||||||
|
# enriched field should exist
|
||||||
|
assert "enriched" in data["result"]
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
pass # Not JSON output
|
||||||
|
|
||||||
|
def test_search_enrich_with_mode(self, runner):
|
||||||
|
"""Test --enrich works with different search modes."""
|
||||||
|
modes = ["exact", "fuzzy", "hybrid"]
|
||||||
|
for mode in modes:
|
||||||
|
result = runner.invoke(
|
||||||
|
app, ["search", "test", "--mode", mode, "--enrich"]
|
||||||
|
)
|
||||||
|
# Should not show validation errors
|
||||||
|
assert "Invalid" not in result.output
|
||||||
|
|
||||||
|
|
||||||
|
class TestEnrichFlagBehavior:
|
||||||
|
"""Test behavioral aspects of --enrich flag."""
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def runner(self):
|
||||||
|
"""Create CLI test runner."""
|
||||||
|
return CliRunner()
|
||||||
|
|
||||||
|
def test_enrich_failure_does_not_break_search(self, runner):
|
||||||
|
"""Test that enrichment failure doesn't prevent search from returning results."""
|
||||||
|
# Even without proper index, search should not crash due to enrich
|
||||||
|
result = runner.invoke(app, ["search", "test", "--enrich", "--verbose"])
|
||||||
|
# Should not have unhandled exception
|
||||||
|
assert "Traceback" not in result.output
|
||||||
|
|
||||||
|
def test_enrich_flag_with_files_only(self, runner):
|
||||||
|
"""Test --enrich is accepted with --files-only mode."""
|
||||||
|
result = runner.invoke(app, ["search", "test", "--enrich", "--files-only"])
|
||||||
|
# Should not show option conflict error
|
||||||
|
assert "conflict" not in result.output.lower()
|
||||||
|
|
||||||
|
def test_enrich_flag_with_limit(self, runner):
|
||||||
|
"""Test --enrich works with --limit parameter."""
|
||||||
|
result = runner.invoke(app, ["search", "test", "--enrich", "--limit", "5"])
|
||||||
|
# Should not show validation error
|
||||||
|
assert "Invalid" not in result.output
|
||||||
|
|
||||||
|
|
||||||
|
class TestEnrichOutputFormat:
|
||||||
|
"""Test output format with --enrich flag."""
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def runner(self):
|
||||||
|
"""Create CLI test runner."""
|
||||||
|
return CliRunner()
|
||||||
|
|
||||||
|
def test_enrich_verbose_shows_status(self, runner):
|
||||||
|
"""Test verbose mode shows enrichment status."""
|
||||||
|
result = runner.invoke(app, ["search", "test", "--enrich", "--verbose"])
|
||||||
|
# Verbose mode may show enrichment info or warnings
|
||||||
|
# Just ensure it doesn't crash
|
||||||
|
assert result.exit_code in [0, 1] # 0 = success, 1 = no index
|
||||||
|
|
||||||
|
def test_json_output_has_enriched_field(self, runner):
|
||||||
|
"""Test JSON output always has enriched field when --enrich used."""
|
||||||
|
result = runner.invoke(app, ["search", "test", "--json", "--enrich"])
|
||||||
|
if result.exit_code == 0:
|
||||||
|
try:
|
||||||
|
data = json.loads(result.output)
|
||||||
|
if data.get("success"):
|
||||||
|
result_data = data.get("result", {})
|
||||||
|
assert "enriched" in result_data
|
||||||
|
assert isinstance(result_data["enriched"], bool)
|
||||||
|
except json.JSONDecodeError:
|
||||||
|
pass
|
||||||
234
codex-lens/tests/test_enrichment.py
Normal file
234
codex-lens/tests/test_enrichment.py
Normal file
@@ -0,0 +1,234 @@
|
|||||||
|
"""Tests for search result enrichment with relationship data."""
|
||||||
|
import sqlite3
|
||||||
|
import tempfile
|
||||||
|
import time
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from codexlens.search.enrichment import RelationshipEnricher
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_db():
|
||||||
|
"""Create a mock database with symbols and relationships."""
|
||||||
|
with tempfile.TemporaryDirectory() as tmpdir:
|
||||||
|
db_path = Path(tmpdir) / "_index.db"
|
||||||
|
conn = sqlite3.connect(str(db_path))
|
||||||
|
cursor = conn.cursor()
|
||||||
|
|
||||||
|
# Create schema
|
||||||
|
cursor.execute('''
|
||||||
|
CREATE TABLE symbols (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
qualified_name TEXT NOT NULL,
|
||||||
|
name TEXT NOT NULL,
|
||||||
|
kind TEXT NOT NULL,
|
||||||
|
file_path TEXT NOT NULL,
|
||||||
|
start_line INTEGER NOT NULL,
|
||||||
|
end_line INTEGER NOT NULL
|
||||||
|
)
|
||||||
|
''')
|
||||||
|
cursor.execute('''
|
||||||
|
CREATE TABLE symbol_relationships (
|
||||||
|
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||||
|
source_symbol_id INTEGER NOT NULL,
|
||||||
|
target_symbol_fqn TEXT NOT NULL,
|
||||||
|
relationship_type TEXT NOT NULL,
|
||||||
|
file_path TEXT NOT NULL,
|
||||||
|
line INTEGER,
|
||||||
|
FOREIGN KEY (source_symbol_id) REFERENCES symbols(id)
|
||||||
|
)
|
||||||
|
''')
|
||||||
|
|
||||||
|
# Insert test data
|
||||||
|
cursor.execute('''
|
||||||
|
INSERT INTO symbols (qualified_name, name, kind, file_path, start_line, end_line)
|
||||||
|
VALUES ('module.main', 'main', 'function', 'module.py', 1, 10)
|
||||||
|
''')
|
||||||
|
main_id = cursor.lastrowid
|
||||||
|
|
||||||
|
cursor.execute('''
|
||||||
|
INSERT INTO symbols (qualified_name, name, kind, file_path, start_line, end_line)
|
||||||
|
VALUES ('module.helper', 'helper', 'function', 'module.py', 12, 20)
|
||||||
|
''')
|
||||||
|
helper_id = cursor.lastrowid
|
||||||
|
|
||||||
|
cursor.execute('''
|
||||||
|
INSERT INTO symbols (qualified_name, name, kind, file_path, start_line, end_line)
|
||||||
|
VALUES ('utils.fetch', 'fetch', 'function', 'utils.py', 1, 5)
|
||||||
|
''')
|
||||||
|
fetch_id = cursor.lastrowid
|
||||||
|
|
||||||
|
# main calls helper
|
||||||
|
cursor.execute('''
|
||||||
|
INSERT INTO symbol_relationships (source_symbol_id, target_symbol_fqn, relationship_type, file_path, line)
|
||||||
|
VALUES (?, 'helper', 'calls', 'module.py', 5)
|
||||||
|
''', (main_id,))
|
||||||
|
|
||||||
|
# main calls fetch
|
||||||
|
cursor.execute('''
|
||||||
|
INSERT INTO symbol_relationships (source_symbol_id, target_symbol_fqn, relationship_type, file_path, line)
|
||||||
|
VALUES (?, 'utils.fetch', 'calls', 'module.py', 6)
|
||||||
|
''', (main_id,))
|
||||||
|
|
||||||
|
# helper imports os
|
||||||
|
cursor.execute('''
|
||||||
|
INSERT INTO symbol_relationships (source_symbol_id, target_symbol_fqn, relationship_type, file_path, line)
|
||||||
|
VALUES (?, 'os', 'imports', 'module.py', 13)
|
||||||
|
''', (helper_id,))
|
||||||
|
|
||||||
|
conn.commit()
|
||||||
|
conn.close()
|
||||||
|
|
||||||
|
yield db_path
|
||||||
|
|
||||||
|
|
||||||
|
class TestRelationshipEnricher:
    """Test suite for RelationshipEnricher."""

    def test_enrich_with_relationships(self, mock_db):
        """Test enriching results with valid relationships."""
        search_hits = [
            {"path": "module.py", "score": 0.9, "excerpt": "def main():", "symbol": "main"},
            {"path": "module.py", "score": 0.8, "excerpt": "def helper():", "symbol": "helper"},
        ]
        with RelationshipEnricher(mock_db) as enricher:
            enriched = enricher.enrich(search_hits, limit=10)

        # main's relationships: it should have at least two edges.
        first = enriched[0]
        assert "relationships" in first
        rels_of_main = first["relationships"]
        assert len(rels_of_main) >= 2

        # Outgoing edges from main should point at helper (directly or via FQN).
        outgoing_targets = [
            rel["target"] for rel in rels_of_main if rel["direction"] == "outgoing"
        ]
        assert "helper" in outgoing_targets or any("helper" in t for t in outgoing_targets)

        # helper's relationships: at least one incoming edge (main calls helper),
        # reported from helper's perspective as "called_by".
        second = enriched[1]
        assert "relationships" in second
        rels_of_helper = second["relationships"]
        assert len(rels_of_helper) >= 1
        incoming = [rel for rel in rels_of_helper if rel["direction"] == "incoming"]
        assert len(incoming) >= 1
        assert incoming[0]["type"] == "called_by"

    def test_enrich_missing_symbol(self, mock_db):
        """Test graceful handling of missing symbols."""
        unknown_hit = {"path": "unknown.py", "score": 0.9, "excerpt": "code", "symbol": "nonexistent"}
        with RelationshipEnricher(mock_db) as enricher:
            enriched = enricher.enrich([unknown_hit], limit=10)

        # Unknown symbols yield an empty relationship list rather than an error.
        assert "relationships" in enriched[0]
        assert enriched[0]["relationships"] == []

    def test_enrich_no_symbol_name(self, mock_db):
        """Test handling results without symbol names."""
        anonymous_hit = {"path": "module.py", "score": 0.9, "excerpt": "code", "symbol": None}
        with RelationshipEnricher(mock_db) as enricher:
            enriched = enricher.enrich([anonymous_hit], limit=10)

        assert "relationships" in enriched[0]
        assert enriched[0]["relationships"] == []

    def test_enrich_performance(self, mock_db):
        """Test that enrichment is fast (<100ms for 10 results)."""
        hits = [
            {"path": "module.py", "score": 0.9, "excerpt": f"code{i}", "symbol": "main"}
            for i in range(10)
        ]
        with RelationshipEnricher(mock_db) as enricher:
            start = time.perf_counter()
            enricher.enrich(hits, limit=10)
            elapsed_ms = (time.perf_counter() - start) * 1000

        assert elapsed_ms < 100, f"Enrichment took {elapsed_ms:.1f}ms, expected < 100ms"

    def test_enrich_limit(self, mock_db):
        """Test that limit parameter is respected."""
        hits = [
            {"path": "module.py", "score": 0.9, "symbol": "main"},
            {"path": "module.py", "score": 0.8, "symbol": "helper"},
            {"path": "utils.py", "score": 0.7, "symbol": "fetch"},
        ]
        with RelationshipEnricher(mock_db) as enricher:
            # Ask for enrichment of only the first two hits.
            enriched = enricher.enrich(hits, limit=2)

        assert "relationships" in enriched[0]
        assert "relationships" in enriched[1]
        # The third result must be left untouched (no relationships key).
        assert "relationships" not in enriched[2]

    def test_connection_failure_graceful(self):
        """Test graceful handling when database doesn't exist."""
        missing_db = Path("/nonexistent/path/_index.db")
        with RelationshipEnricher(missing_db) as enricher:
            enriched = enricher.enrich([{"path": "test.py", "score": 0.9, "symbol": "test"}])

        # Original results come back unchanged instead of crashing.
        assert len(enriched) == 1

    def test_incoming_type_conversion(self, mock_db):
        """Test that relationship types are correctly converted for incoming."""
        with RelationshipEnricher(mock_db) as enricher:
            enriched = enricher.enrich([{"path": "module.py", "score": 0.9, "symbol": "helper"}])

        incoming = [
            rel for rel in enriched[0]["relationships"] if rel["direction"] == "incoming"
        ]
        if incoming:
            # A "calls" edge is reported as "called_by" from the target's side.
            assert incoming[0]["type"] == "called_by"

    def test_context_manager(self, mock_db):
        """Test that context manager properly opens and closes connections."""
        # Manual lifecycle: constructor opens the connection, close() drops it.
        enricher = RelationshipEnricher(mock_db)
        assert enricher.db_conn is not None
        enricher.close()
        assert enricher.db_conn is None

        # Context-manager lifecycle mirrors the manual one.
        with RelationshipEnricher(mock_db) as managed:
            assert managed.db_conn is not None
        assert managed.db_conn is None

    def test_relationship_data_structure(self, mock_db):
        """Test that relationship data has correct structure."""
        with RelationshipEnricher(mock_db) as enricher:
            enriched = enricher.enrich([{"path": "module.py", "score": 0.9, "symbol": "main"}])

        for rel in enriched[0]["relationships"]:
            # Every relationship carries a type, a direction, and a file.
            assert "type" in rel
            assert "direction" in rel
            assert "file" in rel
            assert rel["direction"] in ["outgoing", "incoming"]
            # Outgoing edges name a target; incoming edges name a source.
            if rel["direction"] == "outgoing":
                assert "target" in rel
            else:
                assert "source" in rel
|
||||||
238
codex-lens/tests/test_symbol_extractor.py
Normal file
238
codex-lens/tests/test_symbol_extractor.py
Normal file
@@ -0,0 +1,238 @@
|
|||||||
|
"""Tests for symbol extraction and relationship tracking."""
|
||||||
|
import tempfile
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from codexlens.indexing.symbol_extractor import SymbolExtractor
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def extractor():
    """Yield a connected SymbolExtractor backed by a throwaway SQLite database."""
    with tempfile.TemporaryDirectory() as tmpdir:
        instance = SymbolExtractor(Path(tmpdir) / "test.db")
        instance.connect()
        yield instance
        # Teardown: release the connection before the temp dir is removed.
        instance.close()
|
||||||
|
|
||||||
|
|
||||||
|
class TestSymbolExtractor:
    """Test suite for SymbolExtractor."""

    def test_database_schema_creation(self, extractor):
        """Test that database tables and indexes are created correctly."""
        cursor = extractor.db_conn.cursor()

        # symbols table must exist.
        cursor.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name='symbols'"
        )
        assert cursor.fetchone() is not None

        # symbol_relationships table must exist.
        cursor.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name='symbol_relationships'"
        )
        assert cursor.fetchone() is not None

        # All five idx_* indexes must have been created.
        cursor.execute(
            "SELECT COUNT(*) FROM sqlite_master WHERE type='index' AND name LIKE 'idx_%'"
        )
        assert cursor.fetchone()[0] == 5

    def test_python_function_extraction(self, extractor):
        """Test extracting functions from Python code."""
        code = """
def hello():
    pass

async def world():
    pass
"""
        symbols, _ = extractor.extract_from_file(Path("test.py"), code)

        # Both sync and async defs are reported, in order, as functions.
        extracted = [(s["name"], s["kind"]) for s in symbols]
        assert extracted == [("hello", "function"), ("world", "function")]

    def test_python_class_extraction(self, extractor):
        """Test extracting classes from Python code."""
        code = """
class MyClass:
    pass

class AnotherClass(BaseClass):
    pass
"""
        symbols, _ = extractor.extract_from_file(Path("test.py"), code)

        extracted = [(s["name"], s["kind"]) for s in symbols]
        assert extracted == [("MyClass", "class"), ("AnotherClass", "class")]

    def test_typescript_extraction(self, extractor):
        """Test extracting symbols from TypeScript code."""
        code = """
export function calculateSum(a: number, b: number): number {
  return a + b;
}

export class Calculator {
  multiply(x: number, y: number) {
    return x * y;
  }
}
"""
        symbols, _ = extractor.extract_from_file(Path("test.ts"), code)

        assert len(symbols) == 2
        first, second = symbols
        assert (first["name"], first["kind"]) == ("calculateSum", "function")
        assert (second["name"], second["kind"]) == ("Calculator", "class")

    def test_javascript_extraction(self, extractor):
        """Test extracting symbols from JavaScript code."""
        code = """
function processData(data) {
  return data;
}

class DataProcessor {
  transform(input) {
    return input;
  }
}
"""
        symbols, _ = extractor.extract_from_file(Path("test.js"), code)

        assert len(symbols) == 2
        names = [s["name"] for s in symbols]
        assert names == ["processData", "DataProcessor"]

    def test_relationship_extraction(self, extractor):
        """Test extracting relationships between symbols."""
        code = """
def helper():
    pass

def main():
    helper()
    print("done")
"""
        _, relationships = extractor.extract_from_file(Path("test.py"), code)

        # main's body calls helper; that edge must be captured as "calls".
        call_targets = {r["target"] for r in relationships if r["type"] == "calls"}
        assert "helper" in call_targets

    def test_save_and_query_symbols(self, extractor):
        """Test saving symbols to database and querying them."""
        code = """
def test_func():
    pass

class TestClass:
    pass
"""
        symbols, _ = extractor.extract_from_file(Path("test.py"), code)
        name_to_id = extractor.save_symbols(symbols)

        # save_symbols returns a name -> row-id mapping for both symbols.
        assert len(name_to_id) == 2
        assert {"test_func", "TestClass"} <= set(name_to_id)

        # Both rows must actually be persisted.
        cursor = extractor.db_conn.cursor()
        cursor.execute("SELECT COUNT(*) FROM symbols")
        assert cursor.fetchone()[0] == 2

    def test_save_relationships(self, extractor):
        """Test saving relationships to database."""
        code = """
def caller():
    callee()

def callee():
    pass
"""
        symbols, relationships = extractor.extract_from_file(Path("test.py"), code)
        name_to_id = extractor.save_symbols(symbols)
        extractor.save_relationships(relationships, name_to_id)

        cursor = extractor.db_conn.cursor()
        cursor.execute("SELECT COUNT(*) FROM symbol_relationships")
        assert cursor.fetchone()[0] > 0

    def test_qualified_name_generation(self, extractor):
        """Test that qualified names are generated correctly."""
        code = """
class MyClass:
    pass
"""
        symbols, _ = extractor.extract_from_file(Path("module.py"), code)

        # Qualified name is <module stem>.<symbol name>.
        assert symbols[0]["qualified_name"] == "module.MyClass"

    def test_unsupported_language(self, extractor):
        """Test that unsupported languages return empty results."""
        symbols, relationships = extractor.extract_from_file(
            Path("test.txt"), "some random code"
        )

        assert len(symbols) == 0
        assert len(relationships) == 0

    def test_empty_file(self, extractor):
        """Test handling empty files."""
        symbols, relationships = extractor.extract_from_file(Path("test.py"), "")

        assert len(symbols) == 0
        assert len(relationships) == 0

    def test_complete_workflow(self, extractor):
        """Test complete workflow: extract, save, and verify."""
        code = """
class UserService:
    def get_user(self, user_id):
        return fetch_user(user_id)

def main():
    service = UserService()
    service.get_user(1)
"""
        file_path = Path("service.py")
        symbols, relationships = extractor.extract_from_file(file_path, code)

        # Persist everything.
        name_to_id = extractor.save_symbols(symbols)
        extractor.save_relationships(relationships, name_to_id)

        # Symbols round-trip in source order.
        cursor = extractor.db_conn.cursor()
        cursor.execute("SELECT name, kind FROM symbols ORDER BY start_line")
        db_symbols = cursor.fetchall()
        assert len(db_symbols) == 2
        assert [row[0] for row in db_symbols] == ["UserService", "main"]

        # At least one relationship row joins back to a saved symbol.
        cursor.execute(
            """
            SELECT s.name, r.target_symbol_fqn, r.relationship_type
            FROM symbol_relationships r
            JOIN symbols s ON r.source_symbol_id = s.id
            """
        )
        db_rels = cursor.fetchall()
        assert len(db_rels) > 0
|
||||||
Reference in New Issue
Block a user