Mirror of https://github.com/catlog22/Claude-Code-Workflow.git (synced 2026-02-06 01:54:11 +08:00)
feat(ccw): add session manager tool with auto workspace detection
- Add session_manager tool for workflow session lifecycle management
- Add ccw session CLI command with subcommands:
  - list, init, status, task, stats, delete, read, write, update, archive, mkdir
- Implement auto workspace detection (traverse up to find .workflow)
- Implement auto session location detection (active, archived, lite-plan, lite-fix)
- Add dashboard notifications for tool executions via WebSocket
- Add granular event types (SESSION_CREATED, TASK_UPDATED, etc.)
- Add status_history auto-tracking for task status changes
- Update workflow session commands to document ccw session usage

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
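As a quick orientation for the changes below, here is a minimal sketch of driving the new session_manager tool programmatically rather than through the CLI. The import path assumes you are running from the package root of this repo; the parameter shapes are taken from the tool code added later in this diff.

// Sketch only - import path and session/task IDs are illustrative.
import { executeTool } from './ccw/src/tools/index.js';

const init = await executeTool('session_manager', {
  operation: 'init',
  session_id: 'WFS-demo-feature',
  metadata: { description: 'Demo session' }
});
if (!init.success) throw new Error(init.error);

// Create a task file, then flip its status via the shallow-merge update operation.
await executeTool('session_manager', {
  operation: 'write',
  session_id: 'WFS-demo-feature',
  content_type: 'task',
  path_params: { task_id: 'IMPL-001' },
  content: { task_id: 'IMPL-001', status: 'pending' }
});
await executeTool('session_manager', {
  operation: 'update',
  session_id: 'WFS-demo-feature',
  content_type: 'task',
  path_params: { task_id: 'IMPL-001' },
  content: { status: 'completed' }
});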
@@ -7,6 +7,7 @@ import { uninstallCommand } from './commands/uninstall.js';
import { upgradeCommand } from './commands/upgrade.js';
import { listCommand } from './commands/list.js';
import { toolCommand } from './commands/tool.js';
import { sessionCommand } from './commands/session.js';
import { readFileSync, existsSync } from 'fs';
import { fileURLToPath } from 'url';
import { dirname, join } from 'path';
@@ -115,5 +116,22 @@ export function run(argv) {
    .option('--new <text>', 'New text (for edit_file)')
    .action((subcommand, args, options) => toolCommand(subcommand, args, options));

  // Session command
  program
    .command('session [subcommand] [args...]')
    .description('Workflow session lifecycle management')
    .option('--location <loc>', 'Location filter: active|archived|both')
    .option('--type <type>', 'Content type or session type')
    .option('--content <json>', 'Content for write/update')
    .option('--task-id <id>', 'Task ID for task content')
    .option('--filename <name>', 'Filename for process/chat/etc')
    .option('--dimension <dim>', 'Dimension for review-dim')
    .option('--iteration <iter>', 'Iteration for review-iter')
    .option('--subdir <dir>', 'Subdirectory for mkdir')
    .option('--raw', 'Output raw content only')
    .option('--no-metadata', 'Exclude metadata from list')
    .option('--no-update-status', 'Skip status update on archive')
    .action((subcommand, args, options) => sessionCommand(subcommand, args, options));

  program.parse(argv);
}
ccw/src/commands/session.js  (new file, 697 lines)
@@ -0,0 +1,697 @@
/**
 * Session Command - Workflow session lifecycle management
 * Adapter for session_manager tool providing direct CLI access
 */

import chalk from 'chalk';
import http from 'http';
import { executeTool } from '../tools/index.js';

/**
 * Notify dashboard of granular events (fire and forget)
 * @param {Object} data - Event data
 */
function notifyDashboard(data) {
  const DASHBOARD_PORT = process.env.CCW_PORT || 3456;
  const payload = JSON.stringify({
    ...data,
    timestamp: new Date().toISOString()
  });

  const req = http.request({
    hostname: 'localhost',
    port: DASHBOARD_PORT,
    path: '/api/hook',
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Content-Length': Buffer.byteLength(payload)
    }
  });

  // Fire and forget - log errors only in debug mode
  req.on('error', (err) => {
    if (process.env.DEBUG) console.error('[Dashboard] Notification failed:', err.message);
  });
  req.write(payload);
  req.end();
}

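For reference, the body that notifyDashboard POSTs to the dashboard's /api/hook endpoint looks roughly like the following. The type/sessionId/timestamp fields come from this function; the remaining fields depend on the caller, so treat the concrete values as illustrative.

// Illustrative payload only - exact fields vary per call site below.
// {
//   "type": "TASK_UPDATED",
//   "sessionId": "WFS-my-feature",
//   "entityId": "IMPL-001",
//   "payload": { "status": "completed" },
//   "timestamp": "2026-02-06T00:00:00.000Z"
// }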
/**
 * List sessions
 * @param {Object} options - CLI options
 */
async function listAction(options) {
  const params = {
    operation: 'list',
    location: options.location || 'both',
    include_metadata: options.metadata !== false
  };

  const result = await executeTool('session_manager', params);

  if (!result.success) {
    console.error(chalk.red(`Error: ${result.error}`));
    process.exit(1);
  }

  const { active = [], archived = [], total } = result.result;

  console.log(chalk.bold.cyan('\nWorkflow Sessions\n'));

  if (active.length > 0) {
    console.log(chalk.bold.white('Active Sessions:'));
    for (const session of active) {
      const meta = session.metadata || {};
      console.log(chalk.green(`  [ACTIVE] ${session.session_id}`));
      if (meta.description) console.log(chalk.gray(`    ${meta.description}`));
      if (meta.status) console.log(chalk.gray(`    Status: ${meta.status}`));
    }
    console.log();
  }

  if (archived.length > 0) {
    console.log(chalk.bold.white('Archived Sessions:'));
    for (const session of archived) {
      const meta = session.metadata || {};
      console.log(chalk.blue(`  [ARCHIVED] ${session.session_id}`));
      if (meta.description) console.log(chalk.gray(`    ${meta.description}`));
    }
    console.log();
  }

  if (total === 0) {
    console.log(chalk.yellow('No sessions found'));
  } else {
    console.log(chalk.gray(`Total: ${total} session(s)`));
  }
}

/**
 * Initialize a new session
 * @param {string} sessionId - Session ID
 * @param {Object} options - CLI options
 */
async function initAction(sessionId, options) {
  if (!sessionId) {
    console.error(chalk.red('Session ID is required'));
    console.error(chalk.gray('Usage: ccw session init <session_id> [--type <type>]'));
    process.exit(1);
  }

  const params = {
    operation: 'init',
    session_id: sessionId,
    session_type: options.type || 'workflow'
  };

  const result = await executeTool('session_manager', params);

  if (!result.success) {
    console.error(chalk.red(`Error: ${result.error}`));
    process.exit(1);
  }

  // Emit SESSION_CREATED event
  notifyDashboard({
    type: 'SESSION_CREATED',
    sessionId: sessionId,
    payload: result.result
  });

  console.log(chalk.green(`✓ Session "${sessionId}" initialized`));
  console.log(chalk.gray(`  Location: ${result.result.path}`));
}

/**
 * Read session content
 * @param {string} sessionId - Session ID
 * @param {Object} options - CLI options
 */
async function readAction(sessionId, options) {
  if (!sessionId) {
    console.error(chalk.red('Session ID is required'));
    console.error(chalk.gray('Usage: ccw session read <session_id> --type <content_type>'));
    process.exit(1);
  }

  const params = {
    operation: 'read',
    session_id: sessionId,
    content_type: options.type || 'session'
  };

  // Add path_params if provided
  if (options.taskId) params.path_params = { ...params.path_params, task_id: options.taskId };
  if (options.filename) params.path_params = { ...params.path_params, filename: options.filename };
  if (options.dimension) params.path_params = { ...params.path_params, dimension: options.dimension };
  if (options.iteration) params.path_params = { ...params.path_params, iteration: options.iteration };

  const result = await executeTool('session_manager', params);

  if (!result.success) {
    console.error(chalk.red(`Error: ${result.error}`));
    process.exit(1);
  }

  // Output raw content for piping
  if (options.raw) {
    console.log(typeof result.result.content === 'string'
      ? result.result.content
      : JSON.stringify(result.result.content, null, 2));
  } else {
    console.log(JSON.stringify(result, null, 2));
  }
}

/**
 * Write session content
 * @param {string} sessionId - Session ID
 * @param {Object} options - CLI options
 */
async function writeAction(sessionId, options) {
  if (!sessionId) {
    console.error(chalk.red('Session ID is required'));
    console.error(chalk.gray('Usage: ccw session write <session_id> --type <content_type> --content <json>'));
    process.exit(1);
  }

  if (!options.content) {
    console.error(chalk.red('Content is required (--content)'));
    process.exit(1);
  }

  let content;
  try {
    content = JSON.parse(options.content);
  } catch {
    // If not JSON, treat as string content
    content = options.content;
  }

  const params = {
    operation: 'write',
    session_id: sessionId,
    content_type: options.type || 'session',
    content
  };

  // Add path_params if provided
  if (options.taskId) params.path_params = { ...params.path_params, task_id: options.taskId };
  if (options.filename) params.path_params = { ...params.path_params, filename: options.filename };

  const result = await executeTool('session_manager', params);

  if (!result.success) {
    console.error(chalk.red(`Error: ${result.error}`));
    process.exit(1);
  }

  // Emit granular event based on content_type
  const contentType = params.content_type;
  let eventType = 'CONTENT_WRITTEN';
  let entityId = null;

  switch (contentType) {
    case 'task':
      eventType = 'TASK_CREATED';
      entityId = options.taskId || content.task_id;
      break;
    case 'summary':
      eventType = 'SUMMARY_WRITTEN';
      entityId = options.taskId;
      break;
    case 'plan':
      eventType = 'PLAN_UPDATED';
      break;
    case 'review-dim':
      eventType = 'REVIEW_UPDATED';
      entityId = options.dimension;
      break;
    case 'review-iter':
      eventType = 'REVIEW_UPDATED';
      entityId = options.iteration;
      break;
    case 'review-fix':
      eventType = 'REVIEW_UPDATED';
      entityId = options.filename;
      break;
    case 'session':
      eventType = 'SESSION_UPDATED';
      break;
  }

  notifyDashboard({
    type: eventType,
    sessionId: sessionId,
    entityId: entityId,
    contentType: contentType,
    payload: result.result.written_content || content
  });

  console.log(chalk.green(`✓ Content written to ${result.result.path}`));
}

/**
 * Update session content (merge)
 * @param {string} sessionId - Session ID
 * @param {Object} options - CLI options
 */
async function updateAction(sessionId, options) {
  if (!sessionId) {
    console.error(chalk.red('Session ID is required'));
    console.error(chalk.gray('Usage: ccw session update <session_id> --content <json>'));
    process.exit(1);
  }

  if (!options.content) {
    console.error(chalk.red('Content is required (--content)'));
    process.exit(1);
  }

  let content;
  try {
    content = JSON.parse(options.content);
  } catch (e) {
    console.error(chalk.red('Content must be valid JSON for update operation'));
    console.error(chalk.gray(`Parse error: ${e.message}`));
    process.exit(1);
  }

  const params = {
    operation: 'update',
    session_id: sessionId,
    content_type: options.type || 'session',
    content
  };

  // Add path_params if task update
  if (options.taskId) params.path_params = { task_id: options.taskId };

  const result = await executeTool('session_manager', params);

  if (!result.success) {
    console.error(chalk.red(`Error: ${result.error}`));
    process.exit(1);
  }

  // Emit granular event based on content_type
  const eventType = params.content_type === 'task' ? 'TASK_UPDATED' : 'SESSION_UPDATED';
  notifyDashboard({
    type: eventType,
    sessionId: sessionId,
    entityId: options.taskId || null,
    payload: result.result.merged_data || content
  });

  console.log(chalk.green(`✓ Session "${sessionId}" updated`));
}

/**
 * Archive a session
 * @param {string} sessionId - Session ID
 * @param {Object} options - CLI options
 */
async function archiveAction(sessionId, options) {
  if (!sessionId) {
    console.error(chalk.red('Session ID is required'));
    console.error(chalk.gray('Usage: ccw session archive <session_id>'));
    process.exit(1);
  }

  const params = {
    operation: 'archive',
    session_id: sessionId,
    update_status: options.updateStatus !== false
  };

  const result = await executeTool('session_manager', params);

  if (!result.success) {
    console.error(chalk.red(`Error: ${result.error}`));
    process.exit(1);
  }

  // Emit SESSION_ARCHIVED event
  notifyDashboard({
    type: 'SESSION_ARCHIVED',
    sessionId: sessionId,
    payload: result.result
  });

  console.log(chalk.green(`✓ Session "${sessionId}" archived`));
  console.log(chalk.gray(`  Location: ${result.result.destination}`));
}

/**
 * Update session status (shortcut)
 * @param {string} sessionId - Session ID
 * @param {string} newStatus - New status value
 */
async function statusAction(sessionId, newStatus) {
  if (!sessionId) {
    console.error(chalk.red('Session ID is required'));
    console.error(chalk.gray('Usage: ccw session status <session_id> <status>'));
    process.exit(1);
  }

  if (!newStatus) {
    console.error(chalk.red('Status is required'));
    console.error(chalk.gray('Valid statuses: planning, active, implementing, reviewing, completed, paused'));
    process.exit(1);
  }

  const validStatuses = ['planning', 'active', 'implementing', 'reviewing', 'completed', 'paused'];
  if (!validStatuses.includes(newStatus)) {
    console.error(chalk.red(`Invalid status: ${newStatus}`));
    console.error(chalk.gray(`Valid statuses: ${validStatuses.join(', ')}`));
    process.exit(1);
  }

  const params = {
    operation: 'update',
    session_id: sessionId,
    content_type: 'session',
    content: { status: newStatus, updated_at: new Date().toISOString() }
  };

  const result = await executeTool('session_manager', params);

  if (!result.success) {
    console.error(chalk.red(`Error: ${result.error}`));
    process.exit(1);
  }

  // Emit SESSION_UPDATED event
  notifyDashboard({
    type: 'SESSION_UPDATED',
    sessionId: sessionId,
    payload: { status: newStatus }
  });

  console.log(chalk.green(`✓ Session "${sessionId}" status → ${newStatus}`));
}

/**
 * Update task status (shortcut)
 * @param {string} sessionId - Session ID
 * @param {string} taskId - Task ID
 * @param {string} newStatus - New status value
 */
async function taskAction(sessionId, taskId, newStatus) {
  if (!sessionId) {
    console.error(chalk.red('Session ID is required'));
    console.error(chalk.gray('Usage: ccw session task <session_id> <task_id> <status>'));
    process.exit(1);
  }

  if (!taskId) {
    console.error(chalk.red('Task ID is required'));
    console.error(chalk.gray('Usage: ccw session task <session_id> <task_id> <status>'));
    process.exit(1);
  }

  if (!newStatus) {
    console.error(chalk.red('Status is required'));
    console.error(chalk.gray('Valid statuses: pending, in_progress, completed, blocked, cancelled'));
    process.exit(1);
  }

  const validStatuses = ['pending', 'in_progress', 'completed', 'blocked', 'cancelled'];
  if (!validStatuses.includes(newStatus)) {
    console.error(chalk.red(`Invalid status: ${newStatus}`));
    console.error(chalk.gray(`Valid statuses: ${validStatuses.join(', ')}`));
    process.exit(1);
  }

  // First, read the current task to get existing status
  const readParams = {
    operation: 'read',
    session_id: sessionId,
    content_type: 'task',
    path_params: { task_id: taskId }
  };

  const readResult = await executeTool('session_manager', readParams);

  let currentTask = {};
  let oldStatus = 'unknown';

  if (readResult.success) {
    currentTask = readResult.result.content || {};
    oldStatus = currentTask.status || 'unknown';
  }

  // Build status history entry
  const historyEntry = {
    from: oldStatus,
    to: newStatus,
    changed_at: new Date().toISOString()
  };

  // Update task with new status and appended history
  const params = {
    operation: 'update',
    session_id: sessionId,
    content_type: 'task',
    path_params: { task_id: taskId },
    content: {
      status: newStatus,
      updated_at: new Date().toISOString(),
      status_history: [...(currentTask.status_history || []), historyEntry]
    }
  };

  const result = await executeTool('session_manager', params);

  if (!result.success) {
    console.error(chalk.red(`Error: ${result.error}`));
    process.exit(1);
  }

  // Emit TASK_UPDATED event
  notifyDashboard({
    type: 'TASK_UPDATED',
    sessionId: sessionId,
    entityId: taskId,
    payload: { status: newStatus }
  });

  console.log(chalk.green(`✓ Task "${taskId}" status → ${newStatus}`));
}

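After a couple of transitions driven by the task shortcut above, the task JSON carries its own audit trail. Roughly, with illustrative IDs and timestamps:

// Illustrative .task/IMPL-001.json after "pending -> in_progress -> completed":
// {
//   "task_id": "IMPL-001",
//   "status": "completed",
//   "updated_at": "2026-02-06T00:10:00.000Z",
//   "status_history": [
//     { "from": "pending", "to": "in_progress", "changed_at": "2026-02-06T00:05:00.000Z" },
//     { "from": "in_progress", "to": "completed", "changed_at": "2026-02-06T00:10:00.000Z" }
//   ]
// }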
/**
 * Create directory within session
 * @param {string} sessionId - Session ID
 * @param {Object} options - CLI options
 */
async function mkdirAction(sessionId, options) {
  if (!sessionId) {
    console.error(chalk.red('Session ID is required'));
    console.error(chalk.gray('Usage: ccw session mkdir <session_id> --subdir <subdir>'));
    process.exit(1);
  }

  if (!options.subdir) {
    console.error(chalk.red('Subdirectory is required (--subdir)'));
    process.exit(1);
  }

  const params = {
    operation: 'mkdir',
    session_id: sessionId,
    dirs: [options.subdir] // Convert single subdir to array
  };

  const result = await executeTool('session_manager', params);

  if (!result.success) {
    console.error(chalk.red(`Error: ${result.error}`));
    process.exit(1);
  }

  console.log(chalk.green(`✓ Directory created: ${result.result.directories_created.join(', ')}`));
}

/**
 * Delete file within session
 * @param {string} sessionId - Session ID
 * @param {string} filePath - Relative file path
 */
async function deleteAction(sessionId, filePath) {
  if (!sessionId) {
    console.error(chalk.red('Session ID is required'));
    console.error(chalk.gray('Usage: ccw session delete <session_id> <file_path>'));
    process.exit(1);
  }

  if (!filePath) {
    console.error(chalk.red('File path is required'));
    console.error(chalk.gray('Usage: ccw session delete <session_id> <file_path>'));
    process.exit(1);
  }

  const params = {
    operation: 'delete',
    session_id: sessionId,
    file_path: filePath
  };

  const result = await executeTool('session_manager', params);

  if (!result.success) {
    console.error(chalk.red(`Error: ${result.error}`));
    process.exit(1);
  }

  // Emit FILE_DELETED event
  notifyDashboard({
    type: 'FILE_DELETED',
    sessionId: sessionId,
    payload: { file_path: filePath }
  });

  console.log(chalk.green(`✓ File deleted: ${result.result.deleted}`));
}

/**
 * Get session statistics
 * @param {string} sessionId - Session ID
 */
async function statsAction(sessionId, options = {}) {
  if (!sessionId) {
    console.error(chalk.red('Session ID is required'));
    console.error(chalk.gray('Usage: ccw session stats <session_id>'));
    process.exit(1);
  }

  const params = {
    operation: 'stats',
    session_id: sessionId
  };

  const result = await executeTool('session_manager', params);

  if (!result.success) {
    console.error(chalk.red(`Error: ${result.error}`));
    process.exit(1);
  }

  const { tasks, summaries, has_plan, location } = result.result;

  console.log(chalk.bold.cyan(`\nSession Statistics: ${sessionId}`));
  console.log(chalk.gray(`Location: ${location}\n`));

  console.log(chalk.bold.white('Tasks:'));
  console.log(chalk.gray(`  Total: ${tasks.total}`));
  console.log(chalk.green(`  Completed: ${tasks.completed}`));
  console.log(chalk.yellow(`  In Progress: ${tasks.in_progress}`));
  console.log(chalk.blue(`  Pending: ${tasks.pending}`));
  console.log(chalk.red(`  Blocked: ${tasks.blocked}`));
  console.log(chalk.gray(`  Cancelled: ${tasks.cancelled}\n`));

  console.log(chalk.bold.white('Documentation:'));
  console.log(chalk.gray(`  Summaries: ${summaries}`));
  console.log(chalk.gray(`  Plan: ${has_plan ? 'Yes' : 'No'}`));
}

/**
 * Execute raw operation (advanced)
 * @param {string} jsonParams - JSON parameters
 */
async function execAction(jsonParams) {
  if (!jsonParams) {
    console.error(chalk.red('JSON parameters required'));
    console.error(chalk.gray('Usage: ccw session exec \'{"operation":"list","location":"active"}\''));
    process.exit(1);
  }

  let params;
  try {
    params = JSON.parse(jsonParams);
  } catch (e) {
    console.error(chalk.red('Invalid JSON'));
    console.error(chalk.gray(`Parse error: ${e.message}`));
    process.exit(1);
  }

  const result = await executeTool('session_manager', params);
  console.log(JSON.stringify(result, null, 2));
}

/**
 * Session command entry point
 * @param {string} subcommand - Subcommand
 * @param {string[]} args - Arguments
 * @param {Object} options - CLI options
 */
export async function sessionCommand(subcommand, args, options) {
  const argsArray = Array.isArray(args) ? args : (args ? [args] : []);

  switch (subcommand) {
    case 'list':
      await listAction(options);
      break;
    case 'init':
      await initAction(argsArray[0], options);
      break;
    case 'read':
      await readAction(argsArray[0], options);
      break;
    case 'write':
      await writeAction(argsArray[0], options);
      break;
    case 'update':
      await updateAction(argsArray[0], options);
      break;
    case 'archive':
      await archiveAction(argsArray[0], options);
      break;
    case 'status':
      await statusAction(argsArray[0], argsArray[1]);
      break;
    case 'task':
      await taskAction(argsArray[0], argsArray[1], argsArray[2]);
      break;
    case 'mkdir':
      await mkdirAction(argsArray[0], options);
      break;
    case 'delete':
      await deleteAction(argsArray[0], argsArray[1]);
      break;
    case 'stats':
      await statsAction(argsArray[0], options);
      break;
    case 'exec':
      await execAction(argsArray[0]);
      break;
    default:
      console.log(chalk.bold.cyan('\nCCW Session Management\n'));
      console.log('Subcommands:');
      console.log(chalk.gray('  list                                  List all sessions'));
      console.log(chalk.gray('  init <session_id>                     Initialize new session'));
      console.log(chalk.gray('  status <session_id> <status>          Update session status'));
      console.log(chalk.gray('  task <session_id> <task_id> <status>  Update task status'));
      console.log(chalk.gray('  stats <session_id>                    Get session statistics'));
      console.log(chalk.gray('  delete <session_id> <file_path>       Delete file within session'));
      console.log(chalk.gray('  read <session_id>                     Read session content'));
      console.log(chalk.gray('  write <session_id>                    Write session content'));
      console.log(chalk.gray('  update <session_id>                   Update session (merge)'));
      console.log(chalk.gray('  archive <session_id>                  Archive session'));
      console.log(chalk.gray('  mkdir <session_id>                    Create subdirectory'));
      console.log(chalk.gray('  exec <json>                           Execute raw operation'));
      console.log();
      console.log('Status Values:');
      console.log(chalk.gray('  Session: planning, active, implementing, reviewing, completed, paused'));
      console.log(chalk.gray('  Task: pending, in_progress, completed, blocked, cancelled'));
      console.log();
      console.log('Examples:');
      console.log(chalk.gray('  ccw session list'));
      console.log(chalk.gray('  ccw session init WFS-my-feature'));
      console.log(chalk.gray('  ccw session status WFS-my-feature active'));
      console.log(chalk.gray('  ccw session task WFS-my-feature IMPL-001 completed'));
      console.log(chalk.gray('  ccw session stats WFS-my-feature'));
      console.log(chalk.gray('  ccw session delete WFS-my-feature .archiving'));
      console.log(chalk.gray('  ccw session archive WFS-my-feature'));
  }
}
@@ -354,7 +354,7 @@ export async function startServer(options = {}) {
   // API: Hook endpoint for Claude Code notifications
   if (pathname === '/api/hook' && req.method === 'POST') {
     handlePostRequest(req, res, async (body) => {
-      const { type, filePath, sessionId } = body;
+      const { type, filePath, sessionId, ...extraData } = body;

      // Determine session ID from file path if not provided
      let resolvedSessionId = sessionId;
@@ -368,7 +368,8 @@ export async function startServer(options = {}) {
        payload: {
          sessionId: resolvedSessionId,
          filePath: filePath,
-          timestamp: new Date().toISOString()
+          timestamp: new Date().toISOString(),
+          ...extraData // Pass through toolName, status, result, params, error, etc.
        }
      };
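Taken together with the CLI-side notifyDashboard, the hook now forwards the extra fields into the WebSocket broadcast instead of dropping them. A rough sketch of the broadcast payload for a task status change (illustrative values; the nested "payload" is the CLI's own payload field carried through ...extraData):

// Illustrative broadcast message built by the hook handler above:
// {
//   "type": "TASK_UPDATED",
//   "payload": {
//     "sessionId": "WFS-my-feature",
//     "filePath": undefined,
//     "timestamp": "2026-02-06T00:00:01.000Z",
//     "entityId": "IMPL-001",
//     "payload": { "status": "completed" }
//   }
// }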
@@ -60,11 +60,91 @@ function handleNotification(data) {
      }
      break;

    case 'SESSION_CREATED':
    case 'SESSION_ARCHIVED':
    case 'TASK_UPDATED':
    case 'SESSION_UPDATED':
    case 'TASK_CREATED':
    case 'SUMMARY_WRITTEN':
    case 'PLAN_UPDATED':
    case 'REVIEW_UPDATED':
    case 'CONTENT_WRITTEN':
      // Route to state reducer for granular updates
      if (typeof handleWorkflowEvent === 'function') {
        handleWorkflowEvent({ type, ...payload });
      } else {
        // Fallback to full refresh if reducer not available
        refreshIfNeeded();
      }
      break;

    case 'tool_execution':
      // Handle tool execution notifications from CLI
      handleToolExecutionNotification(payload);
      break;

    default:
      console.log('[WS] Unknown notification type:', type);
  }
}

/**
 * Handle tool execution notifications from CLI
 * @param {Object} payload - Tool execution payload
 */
function handleToolExecutionNotification(payload) {
  const { toolName, status, params, result, error, timestamp } = payload;

  // Determine notification type and message
  let notifType = 'info';
  let message = `Tool: ${toolName}`;
  let details = null;

  switch (status) {
    case 'started':
      notifType = 'info';
      message = `Executing ${toolName}...`;
      if (params) {
        // Show truncated params
        const paramStr = JSON.stringify(params);
        details = paramStr.length > 100 ? paramStr.substring(0, 100) + '...' : paramStr;
      }
      break;

    case 'completed':
      notifType = 'success';
      message = `${toolName} completed`;
      if (result) {
        // Show truncated result
        if (result._truncated) {
          details = result.preview;
        } else {
          const resultStr = JSON.stringify(result);
          details = resultStr.length > 150 ? resultStr.substring(0, 150) + '...' : resultStr;
        }
      }
      break;

    case 'failed':
      notifType = 'error';
      message = `${toolName} failed`;
      details = error || 'Unknown error';
      break;

    default:
      notifType = 'info';
      message = `${toolName}: ${status}`;
  }

  // Add to global notifications
  if (typeof addGlobalNotification === 'function') {
    addGlobalNotification(notifType, message, details, 'CLI');
  }

  // Log to console
  console.log(`[CLI] ${status}: ${toolName}`, payload);
}

// ========== Auto Refresh ==========
function initAutoRefresh() {
  // Calculate initial hash
@@ -39,4 +39,153 @@ const taskJsonStore = {};
// ========== Global Notification Queue ==========
// Notification queue visible from any view
let globalNotificationQueue = [];
let isNotificationPanelVisible = false;

// ========== Event Handler ==========
/**
 * Handle granular workflow events from CLI
 * @param {Object} event - Event object with type, sessionId, payload
 */
function handleWorkflowEvent(event) {
  const { type, payload, sessionId, entityId } = event;

  switch (type) {
    case 'SESSION_CREATED':
      // Add to activeSessions array
      if (payload) {
        const sessionData = {
          session_id: sessionId,
          ...(payload.metadata || { status: 'planning', created_at: new Date().toISOString() }),
          location: 'active'
        };

        // Add to store
        const key = `session-${sessionId}`.replace(/[^a-zA-Z0-9-]/g, '-');
        sessionDataStore[key] = sessionData;

        // Add to workflowData
        if (!workflowData.activeSessions) workflowData.activeSessions = [];
        workflowData.activeSessions.push(sessionData);
      }
      break;

    case 'SESSION_ARCHIVED':
      // Move from active to archived
      if (!workflowData.activeSessions) workflowData.activeSessions = [];
      if (!workflowData.archivedSessions) workflowData.archivedSessions = [];

      const activeIndex = workflowData.activeSessions.findIndex(s => s.session_id === sessionId);
      if (activeIndex !== -1) {
        const session = workflowData.activeSessions.splice(activeIndex, 1)[0];
        session.location = 'archived';
        if (payload && payload.metadata) {
          Object.assign(session, payload.metadata);
        }
        workflowData.archivedSessions.push(session);

        // Update store
        const key = `session-${sessionId}`.replace(/[^a-zA-Z0-9-]/g, '-');
        sessionDataStore[key] = session;
      }
      break;

    case 'TASK_UPDATED':
      // Find task in session and merge payload
      const taskSessionKey = `session-${sessionId}`.replace(/[^a-zA-Z0-9-]/g, '-');
      const taskSession = sessionDataStore[taskSessionKey];
      if (taskSession && taskSession.tasks) {
        const task = taskSession.tasks.find(t => t.task_id === entityId);
        if (task && payload) {
          Object.assign(task, payload);
        }
      }
      break;

    case 'SESSION_UPDATED':
      // Update session metadata
      const sessionKey = `session-${sessionId}`.replace(/[^a-zA-Z0-9-]/g, '-');
      const session = sessionDataStore[sessionKey];
      if (session && payload) {
        Object.assign(session, payload);

        // Update in workflowData arrays
        const activeSession = workflowData.activeSessions?.find(s => s.session_id === sessionId);
        const archivedSession = workflowData.archivedSessions?.find(s => s.session_id === sessionId);
        if (activeSession) Object.assign(activeSession, payload);
        if (archivedSession) Object.assign(archivedSession, payload);
      }
      break;

    case 'TASK_CREATED':
      // Add new task to session
      const tcSessionKey = `session-${sessionId}`.replace(/[^a-zA-Z0-9-]/g, '-');
      const tcSession = sessionDataStore[tcSessionKey];
      if (tcSession) {
        if (!tcSession.tasks) tcSession.tasks = [];
        // Check if task already exists (by entityId or task_id in payload)
        const taskId = entityId || (payload && payload.task_id);
        const existingTask = tcSession.tasks.find(t => t.task_id === taskId);
        if (!existingTask && payload) {
          tcSession.tasks.push(payload);
        }
      }
      break;

    case 'SUMMARY_WRITTEN':
      // Update session summary count or mark task as having summary
      const swSessionKey = `session-${sessionId}`.replace(/[^a-zA-Z0-9-]/g, '-');
      const swSession = sessionDataStore[swSessionKey];
      if (swSession) {
        if (!swSession.summaries) swSession.summaries = [];
        swSession.summaries.push({ task_id: entityId, content: payload });
        // Update task status if found
        if (swSession.tasks && entityId) {
          const task = swSession.tasks.find(t => t.task_id === entityId);
          if (task) task.has_summary = true;
        }
      }
      break;

    case 'PLAN_UPDATED':
      // Update session plan reference
      const puSessionKey = `session-${sessionId}`.replace(/[^a-zA-Z0-9-]/g, '-');
      const puSession = sessionDataStore[puSessionKey];
      if (puSession) {
        puSession.has_plan = true;
        puSession.plan_updated_at = new Date().toISOString();
      }
      break;

    case 'REVIEW_UPDATED':
      // Update session review data
      const ruSessionKey = `session-${sessionId}`.replace(/[^a-zA-Z0-9-]/g, '-');
      const ruSession = sessionDataStore[ruSessionKey];
      if (ruSession) {
        if (!ruSession.review) ruSession.review = { dimensions: [], iterations: [], fixes: [] };
        // Track review updates by type based on entityId pattern (prevent duplicates)
        if (event.contentType === 'review-dim') {
          if (!ruSession.review.dimensions.includes(entityId)) ruSession.review.dimensions.push(entityId);
        } else if (event.contentType === 'review-iter') {
          if (!ruSession.review.iterations.includes(entityId)) ruSession.review.iterations.push(entityId);
        } else if (event.contentType === 'review-fix') {
          if (!ruSession.review.fixes.includes(entityId)) ruSession.review.fixes.push(entityId);
        }
        ruSession.has_review = true;
      }
      break;

    case 'CONTENT_WRITTEN':
      // Generic content write - just log for debugging
      console.log(`[State] Content written: ${event.contentType} for ${sessionId}`);
      break;
  }

  // Trigger UI updates
  if (typeof updateStats === 'function') updateStats();
  if (typeof updateBadges === 'function') updateBadges();
  if (typeof updateCarousel === 'function') updateCarousel();

  // Re-render current view if needed
  if (currentView === 'sessions' && typeof renderSessions === 'function') {
    renderSessions();
  }
}

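To tie the pieces together, the WebSocket handler shown earlier ends up invoking this reducer roughly as follows for a task status change (values are illustrative; the field names mirror what the CLI sends through /api/hook):

// Illustrative call - what handleNotification forwards for a TASK_UPDATED message.
handleWorkflowEvent({
  type: 'TASK_UPDATED',
  sessionId: 'WFS-my-feature',
  entityId: 'IMPL-001',
  payload: { status: 'completed' }
});
// The matching task in sessionDataStore['session-WFS-my-feature'] is merged with
// { status: 'completed' }, then updateStats/updateBadges/renderSessions refresh the UI.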
@@ -3,6 +3,7 @@
 * Provides tool discovery, validation, and execution
 */

import http from 'http';
import { editFileTool } from './edit-file.js';
import { getModulesByDepthTool } from './get-modules-by-depth.js';
import { classifyFoldersTool } from './classify-folders.js';
@@ -13,10 +14,44 @@ import { uiGeneratePreviewTool } from './ui-generate-preview.js';
import { uiInstantiatePrototypesTool } from './ui-instantiate-prototypes.js';
import { updateModuleClaudeTool } from './update-module-claude.js';
import { convertTokensToCssTool } from './convert-tokens-to-css.js';
import { sessionManagerTool } from './session-manager.js';

// Tool registry - add new tools here
const tools = new Map();

// Dashboard notification settings
const DASHBOARD_PORT = process.env.CCW_PORT || 3456;

/**
 * Notify dashboard of tool execution events (fire and forget)
 * @param {Object} data - Notification data
 */
function notifyDashboard(data) {
  const payload = JSON.stringify({
    type: 'tool_execution',
    ...data,
    timestamp: new Date().toISOString()
  });

  const req = http.request({
    hostname: 'localhost',
    port: DASHBOARD_PORT,
    path: '/api/hook',
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'Content-Length': Buffer.byteLength(payload)
    }
  });

  // Fire and forget - log errors only in debug mode
  req.on('error', (err) => {
    if (process.env.DEBUG) console.error('[Dashboard] Tool notification failed:', err.message);
  });
  req.write(payload);
  req.end();
}

/**
 * Register a tool in the registry
 * @param {Object} tool - Tool definition
@@ -117,14 +152,36 @@ export async function executeTool(name, params = {}) {
    };
  }

  // Notify dashboard - execution started
  notifyDashboard({
    toolName: name,
    status: 'started',
    params: sanitizeParams(params)
  });

  // Execute tool
  try {
    const result = await tool.execute(params);

    // Notify dashboard - execution completed
    notifyDashboard({
      toolName: name,
      status: 'completed',
      result: sanitizeResult(result)
    });

    return {
      success: true,
      result
    };
  } catch (error) {
    // Notify dashboard - execution failed
    notifyDashboard({
      toolName: name,
      status: 'failed',
      error: error.message || 'Tool execution failed'
    });

    return {
      success: false,
      error: error.message || 'Tool execution failed'
@@ -132,6 +189,35 @@ export async function executeTool(name, params = {}) {
  }
}

/**
 * Sanitize params for notification (truncate large values)
 */
function sanitizeParams(params) {
  const sanitized = {};
  for (const [key, value] of Object.entries(params)) {
    if (typeof value === 'string' && value.length > 200) {
      sanitized[key] = value.substring(0, 200) + '...';
    } else if (typeof value === 'object' && value !== null) {
      sanitized[key] = '[Object]';
    } else {
      sanitized[key] = value;
    }
  }
  return sanitized;
}

/**
 * Sanitize result for notification (truncate large values)
 */
function sanitizeResult(result) {
  if (result === null || result === undefined) return result;
  const str = JSON.stringify(result);
  if (str.length > 500) {
    return { _truncated: true, preview: str.substring(0, 500) + '...' };
  }
  return result;
}

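A quick illustration of the truncation contract shared with the dashboard (the _truncated/preview shape is exactly what handleToolExecutionNotification checks for):

// Illustrative only:
const big = { data: 'x'.repeat(1000) };
// sanitizeResult(big)        -> { _truncated: true, preview: '{"data":"xxxx...' }
// sanitizeResult({ ok: true }) -> { ok: true }  (small results pass through unchanged)
// sanitizeParams({ content: 'very long string...', options: { a: 1 } })
//   -> { content: 'very long st...'(first 200 chars), options: '[Object]' }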
/**
 * Get tool schema in MCP-compatible format
 * @param {string} name - Tool name
@@ -171,6 +257,7 @@ registerTool(uiGeneratePreviewTool);
registerTool(uiInstantiatePrototypesTool);
registerTool(updateModuleClaudeTool);
registerTool(convertTokensToCssTool);
registerTool(sessionManagerTool);

// Export for external tool registration
export { registerTool };
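Because registerTool is exported, a third-party tool can plug into the same registry and inherit the dashboard notifications from executeTool. A minimal sketch follows; the tool-definition fields beyond execute (name, description) are inferred from how the built-in tools are used in this diff, not from a documented schema, and the import path is an assumption:

// Hypothetical external registration - field names and path are assumptions.
import { registerTool, executeTool } from './ccw/src/tools/index.js';

registerTool({
  name: 'hello_world',
  description: 'Example tool registered from outside ccw',
  execute: async (params) => ({ greeting: `hello ${params.name || 'world'}` })
});

const res = await executeTool('hello_world', { name: 'ccw' });
// res -> { success: true, result: { greeting: 'hello ccw' } }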
ccw/src/tools/session-manager.js  (new file, 799 lines)
@@ -0,0 +1,799 @@
/**
 * Session Manager Tool - Workflow session lifecycle management
 * Operations: init, list, read, write, update, archive, mkdir
 * Content routing via content_type + path_params
 */

import { readFileSync, writeFileSync, existsSync, readdirSync, mkdirSync, renameSync, rmSync, copyFileSync, statSync } from 'fs';
import { resolve, join, dirname, basename } from 'path';

// Base paths for session storage
const WORKFLOW_BASE = '.workflow';
const ACTIVE_BASE = '.workflow/active';
const ARCHIVE_BASE = '.workflow/archives';
const LITE_PLAN_BASE = '.workflow/.lite-plan';
const LITE_FIX_BASE = '.workflow/.lite-fix';

// Session ID validation pattern (alphanumeric, hyphen, underscore)
const SESSION_ID_PATTERN = /^[a-zA-Z0-9_-]+$/;

// Cached workflow root (computed once per execution)
let cachedWorkflowRoot = null;

/**
 * Find project root by traversing up looking for .workflow directory
 * Falls back to cwd if not found
 */
function findWorkflowRoot() {
  if (cachedWorkflowRoot) return cachedWorkflowRoot;

  let dir = process.cwd();
  const root = dirname(dir) === dir ? dir : null; // filesystem root

  while (dir && dir !== root) {
    if (existsSync(join(dir, WORKFLOW_BASE))) {
      cachedWorkflowRoot = dir;
      return dir;
    }
    const parent = dirname(dir);
    if (parent === dir) break; // reached filesystem root
    dir = parent;
  }

  // Fallback to cwd (for init operation)
  cachedWorkflowRoot = process.cwd();
  return cachedWorkflowRoot;
}

/**
 * Validate session ID format
 */
function validateSessionId(sessionId) {
  if (!sessionId || typeof sessionId !== 'string') {
    throw new Error('session_id must be a non-empty string');
  }
  if (!SESSION_ID_PATTERN.test(sessionId)) {
    throw new Error(`Invalid session_id format: "${sessionId}". Only alphanumeric, hyphen, and underscore allowed.`);
  }
  if (sessionId.length > 100) {
    throw new Error('session_id must be 100 characters or less');
  }
}

/**
 * Validate path params to prevent path traversal
 */
function validatePathParams(pathParams) {
  for (const [key, value] of Object.entries(pathParams)) {
    if (typeof value !== 'string') continue;
    if (value.includes('..') || value.includes('/') || value.includes('\\')) {
      throw new Error(`Invalid path_params.${key}: path traversal characters not allowed`);
    }
  }
}

/**
 * Content type to file path routing
 * {base} is replaced with session base path
 * Dynamic params: {task_id}, {filename}, {dimension}, {iteration}
 */
const PATH_ROUTES = {
  'session': '{base}/workflow-session.json',
  'plan': '{base}/IMPL_PLAN.md',
  'task': '{base}/.task/{task_id}.json',
  'summary': '{base}/.summaries/{task_id}-summary.md',
  'process': '{base}/.process/{filename}',
  'chat': '{base}/.chat/{filename}',
  'brainstorm': '{base}/.brainstorming/{filename}',
  'review-dim': '{base}/.review/dimensions/{dimension}.json',
  'review-iter': '{base}/.review/iterations/{iteration}.json',
  'review-fix': '{base}/.review/fixes/{filename}',
  'todo': '{base}/TODO_LIST.md',
  'context': '{base}/context-package.json'
};

/**
 * Resolve path with base and parameters
 */
function resolvePath(base, contentType, pathParams = {}) {
  const template = PATH_ROUTES[contentType];
  if (!template) {
    throw new Error(`Unknown content_type: ${contentType}. Valid types: ${Object.keys(PATH_ROUTES).join(', ')}`);
  }

  let path = template.replace('{base}', base);

  // Replace dynamic parameters
  for (const [key, value] of Object.entries(pathParams)) {
    path = path.replace(`{${key}}`, value);
  }

  // Check for unreplaced placeholders
  const unreplaced = path.match(/\{[^}]+\}/g);
  if (unreplaced) {
    throw new Error(`Missing path_params: ${unreplaced.join(', ')} for content_type "${contentType}"`);
  }

  return resolve(findWorkflowRoot(), path);
}

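For example, with the routing table above, a summary write for one task resolves roughly like this (assuming the session lives under .workflow/active and the repo root is /repo):

// Illustrative resolution via PATH_ROUTES:
// resolvePath('/repo/.workflow/active/WFS-my-feature', 'summary', { task_id: 'IMPL-001' })
//   -> '/repo/.workflow/active/WFS-my-feature/.summaries/IMPL-001-summary.md'
// A missing parameter fails fast:
// resolvePath(base, 'task', {})
//   -> throws 'Missing path_params: {task_id} for content_type "task"'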
/**
 * Get session base path for init (always active)
 */
function getSessionBase(sessionId) {
  return resolve(findWorkflowRoot(), ACTIVE_BASE, sessionId);
}

/**
 * Auto-detect session location by searching all known paths
 * Search order: active, archives, lite-plan, lite-fix
 */
function findSession(sessionId) {
  const root = findWorkflowRoot();
  const searchPaths = [
    { path: resolve(root, ACTIVE_BASE, sessionId), location: 'active' },
    { path: resolve(root, ARCHIVE_BASE, sessionId), location: 'archived' },
    { path: resolve(root, LITE_PLAN_BASE, sessionId), location: 'lite-plan' },
    { path: resolve(root, LITE_FIX_BASE, sessionId), location: 'lite-fix' }
  ];

  for (const { path, location } of searchPaths) {
    if (existsSync(path)) {
      return { path, location };
    }
  }
  return null;
}

/**
 * Ensure directory exists
 */
function ensureDir(dirPath) {
  if (!existsSync(dirPath)) {
    mkdirSync(dirPath, { recursive: true });
  }
}

/**
 * Read JSON file safely
 */
function readJsonFile(filePath) {
  if (!existsSync(filePath)) {
    throw new Error(`File not found: ${filePath}`);
  }
  try {
    const content = readFileSync(filePath, 'utf8');
    return JSON.parse(content);
  } catch (error) {
    if (error instanceof SyntaxError) {
      throw new Error(`Invalid JSON in ${filePath}: ${error.message}`);
    }
    throw new Error(`Failed to read ${filePath}: ${error.message}`);
  }
}

/**
 * Write JSON file with formatting
 */
function writeJsonFile(filePath, data) {
  ensureDir(dirname(filePath));
  const content = JSON.stringify(data, null, 2);
  writeFileSync(filePath, content, 'utf8');
}

/**
 * Write text file
 */
function writeTextFile(filePath, content) {
  ensureDir(dirname(filePath));
  writeFileSync(filePath, content, 'utf8');
}

// ============================================================
// Operation Handlers
// ============================================================

/**
 * Operation: init
 * Create new session with directory structure
 */
function executeInit(params) {
  const { session_id, metadata } = params;

  if (!session_id) {
    throw new Error('Parameter "session_id" is required for init');
  }

  // Validate session_id format
  validateSessionId(session_id);

  // Check if session already exists (auto-detect all locations)
  const existing = findSession(session_id);
  if (existing) {
    throw new Error(`Session "${session_id}" already exists in ${existing.location}`);
  }

  const sessionPath = getSessionBase(session_id);

  // Create session directory structure
  ensureDir(sessionPath);
  ensureDir(join(sessionPath, '.task'));
  ensureDir(join(sessionPath, '.summaries'));
  ensureDir(join(sessionPath, '.process'));

  // Create workflow-session.json if metadata provided
  let sessionMetadata = null;
  if (metadata) {
    const sessionFile = join(sessionPath, 'workflow-session.json');
    const sessionData = {
      session_id,
      status: 'planning',
      created_at: new Date().toISOString(),
      ...metadata
    };
    writeJsonFile(sessionFile, sessionData);
    sessionMetadata = sessionData;
  }

  return {
    operation: 'init',
    session_id,
    path: sessionPath,
    directories_created: ['.task', '.summaries', '.process'],
    metadata: sessionMetadata,
    message: `Session "${session_id}" initialized successfully`
  };
}

/**
 * Operation: list
 * List sessions (active, archived, or both)
 */
function executeList(params) {
  const { location = 'both', include_metadata = false } = params;

  const result = {
    operation: 'list',
    active: [],
    archived: [],
    total: 0
  };

  // List active sessions
  if (location === 'active' || location === 'both') {
    const activePath = resolve(findWorkflowRoot(), ACTIVE_BASE);
    if (existsSync(activePath)) {
      const entries = readdirSync(activePath, { withFileTypes: true });
      result.active = entries
        .filter(e => e.isDirectory() && e.name.startsWith('WFS-'))
        .map(e => {
          const sessionInfo = { session_id: e.name, location: 'active' };
          if (include_metadata) {
            const metaPath = join(activePath, e.name, 'workflow-session.json');
            if (existsSync(metaPath)) {
              try {
                sessionInfo.metadata = readJsonFile(metaPath);
              } catch {
                sessionInfo.metadata = null;
              }
            }
          }
          return sessionInfo;
        });
    }
  }

  // List archived sessions
  if (location === 'archived' || location === 'both') {
    const archivePath = resolve(findWorkflowRoot(), ARCHIVE_BASE);
    if (existsSync(archivePath)) {
      const entries = readdirSync(archivePath, { withFileTypes: true });
      result.archived = entries
        .filter(e => e.isDirectory() && e.name.startsWith('WFS-'))
        .map(e => {
          const sessionInfo = { session_id: e.name, location: 'archived' };
          if (include_metadata) {
            const metaPath = join(archivePath, e.name, 'workflow-session.json');
            if (existsSync(metaPath)) {
              try {
                sessionInfo.metadata = readJsonFile(metaPath);
              } catch {
                sessionInfo.metadata = null;
              }
            }
          }
          return sessionInfo;
        });
    }
  }

  result.total = result.active.length + result.archived.length;
  return result;
}

/**
 * Operation: read
 * Read file content by content_type
 */
function executeRead(params) {
  const { session_id, content_type, path_params = {} } = params;

  if (!session_id) {
    throw new Error('Parameter "session_id" is required for read');
  }
  if (!content_type) {
    throw new Error('Parameter "content_type" is required for read');
  }

  // Validate inputs
  validateSessionId(session_id);
  validatePathParams(path_params);

  const session = findSession(session_id);
  if (!session) {
    throw new Error(`Session "${session_id}" not found`);
  }

  const filePath = resolvePath(session.path, content_type, path_params);

  if (!existsSync(filePath)) {
    throw new Error(`File not found: ${filePath}`);
  }

  // Read content
  const rawContent = readFileSync(filePath, 'utf8');

  // Parse JSON for JSON content types
  const isJson = filePath.endsWith('.json');
  const content = isJson ? JSON.parse(rawContent) : rawContent;

  return {
    operation: 'read',
    session_id,
    content_type,
    path: filePath,
    location: session.location,
    content,
    is_json: isJson
  };
}

/**
 * Operation: write
 * Write content to file by content_type
 */
function executeWrite(params) {
  const { session_id, content_type, content, path_params = {} } = params;

  if (!session_id) {
    throw new Error('Parameter "session_id" is required for write');
  }
  if (!content_type) {
    throw new Error('Parameter "content_type" is required for write');
  }
  if (content === undefined) {
    throw new Error('Parameter "content" is required for write');
  }

  // Validate inputs
  validateSessionId(session_id);
  validatePathParams(path_params);

  const session = findSession(session_id);
  if (!session) {
    throw new Error(`Session "${session_id}" not found. Use init operation first.`);
  }

  const filePath = resolvePath(session.path, content_type, path_params);
  const isJson = filePath.endsWith('.json');

  // Write content
  if (isJson) {
    writeJsonFile(filePath, content);
  } else {
    writeTextFile(filePath, typeof content === 'string' ? content : JSON.stringify(content, null, 2));
  }

  // Return written content for task/summary types
  const returnContent = (content_type === 'task' || content_type === 'summary') ? content : undefined;

  return {
    operation: 'write',
    session_id,
    content_type,
    written_content: returnContent,
    path: filePath,
    location: session.location,
    message: `File written successfully`
  };
}

/**
 * Operation: update
 * Update existing JSON file with shallow merge
 */
function executeUpdate(params) {
  const { session_id, content_type, content, path_params = {} } = params;

  if (!session_id) {
    throw new Error('Parameter "session_id" is required for update');
  }
  if (!content_type) {
    throw new Error('Parameter "content_type" is required for update');
  }
  if (!content || typeof content !== 'object') {
    throw new Error('Parameter "content" must be an object for update');
  }

  const session = findSession(session_id);
  if (!session) {
    throw new Error(`Session "${session_id}" not found`);
  }

  const filePath = resolvePath(session.path, content_type, path_params);

  if (!filePath.endsWith('.json')) {
    throw new Error('Update operation only supports JSON files');
  }

  // Read existing content or start with empty object
  let existing = {};
  if (existsSync(filePath)) {
    existing = readJsonFile(filePath);
  }

  // Shallow merge
  const merged = { ...existing, ...content };
  writeJsonFile(filePath, merged);

  return {
    operation: 'update',
    session_id,
    content_type,
    path: filePath,
    location: session.location,
    fields_updated: Object.keys(content),
    merged_data: merged,
    message: `File updated successfully`
  };
}

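The merge above is shallow, which matters for nested fields: top-level keys in content replace whatever was on disk, including whole arrays and objects. A quick illustration with made-up values:

// Shallow merge semantics of the update operation:
const existing = { status: 'pending', status_history: [{ from: 'unknown', to: 'pending' }] };
const content  = { status: 'completed' };
// { ...existing, ...content } -> { status: 'completed', status_history: [ ...unchanged... ] }
// Passing status_history in content would replace the whole array, which is why the
// CLI's taskAction reads the task first and sends the already-appended history array.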
/**
|
||||
* Operation: archive
|
||||
* Move session from active to archives
|
||||
*/
|
||||
function executeArchive(params) {
|
||||
const { session_id, update_status = true } = params;
|
||||
|
||||
if (!session_id) {
|
||||
throw new Error('Parameter "session_id" is required for archive');
|
||||
}
|
||||
|
||||
const activePath = getSessionBase(session_id, false);
|
||||
const archivePath = getSessionBase(session_id, true);
|
||||
|
||||
if (!existsSync(activePath)) {
|
||||
// Check if already archived
|
||||
if (existsSync(archivePath)) {
|
||||
return {
|
||||
operation: 'archive',
|
||||
session_id,
|
||||
status: 'already_archived',
|
||||
path: archivePath,
|
||||
message: `Session "${session_id}" is already archived`
|
||||
};
|
||||
}
|
||||
throw new Error(`Session "${session_id}" not found in active sessions`);
|
||||
}
|
||||
|
||||
// Update status to completed before archiving
|
||||
if (update_status) {
|
||||
const sessionFile = join(activePath, 'workflow-session.json');
|
||||
if (existsSync(sessionFile)) {
|
||||
const sessionData = readJsonFile(sessionFile);
|
||||
sessionData.status = 'completed';
|
||||
sessionData.archived_at = new Date().toISOString();
|
||||
writeJsonFile(sessionFile, sessionData);
|
||||
}
|
||||
}
|
||||
|
||||
// Ensure archive directory exists
|
||||
ensureDir(dirname(archivePath));
|
||||
|
||||
// Move session directory
|
||||
renameSync(activePath, archivePath);
|
||||
|
||||
// Read session metadata after archiving
|
||||
let sessionMetadata = null;
|
||||
const sessionFile = join(archivePath, 'workflow-session.json');
|
||||
if (existsSync(sessionFile)) {
|
||||
sessionMetadata = readJsonFile(sessionFile);
|
||||
}
|
||||
|
||||
return {
|
||||
operation: 'archive',
|
||||
session_id,
|
||||
status: 'archived',
|
||||
source: activePath,
|
||||
destination: archivePath,
|
||||
metadata: sessionMetadata,
|
||||
message: `Session "${session_id}" archived successfully`
|
||||
};
|
||||
}
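
// Illustrative example (the session ID is a placeholder). Pass "update_status": false
// to move the directory without stamping status/archived_at first:
//   ccw tool exec session_manager '{"operation":"archive","session_id":"WFS-demo"}'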

/**
 * Operation: mkdir
 * Create directory structure within session
 */
function executeMkdir(params) {
  const { session_id, dirs } = params;

  if (!session_id) {
    throw new Error('Parameter "session_id" is required for mkdir');
  }
  if (!dirs || !Array.isArray(dirs) || dirs.length === 0) {
    throw new Error('Parameter "dirs" must be a non-empty array');
  }

  const session = findSession(session_id);
  if (!session) {
    throw new Error(`Session "${session_id}" not found`);
  }

  const created = [];
  for (const dir of dirs) {
    const dirPath = join(session.path, dir);
    ensureDir(dirPath);
    created.push(dir);
  }

  return {
    operation: 'mkdir',
    session_id,
    location: session.location,
    directories_created: created,
    message: `Created ${created.length} directories`
  };
}
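
// Illustrative example (the session ID and directory names are placeholders):
//   ccw tool exec session_manager '{"operation":"mkdir","session_id":"WFS-demo","dirs":[".task",".summaries"]}'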

/**
 * Operation: delete
 * Delete a file within session (security: path traversal prevention)
 */
function executeDelete(params) {
  const { session_id, file_path } = params;

  if (!session_id) {
    throw new Error('Parameter "session_id" is required for delete');
  }
  if (!file_path) {
    throw new Error('Parameter "file_path" is required for delete');
  }

  // Validate session exists
  const session = findSession(session_id);
  if (!session) {
    throw new Error(`Session "${session_id}" not found`);
  }

  // Security: Prevent path traversal
  if (file_path.includes('..') || file_path.includes('\\')) {
    throw new Error('Invalid file_path: path traversal characters not allowed');
  }

  // Construct absolute path
  const absolutePath = resolve(session.path, file_path);

  // Security: Verify path is within session directory
  if (!absolutePath.startsWith(session.path)) {
    throw new Error('Security error: file_path must be within session directory');
  }

  // Check file exists
  if (!existsSync(absolutePath)) {
    throw new Error(`File not found: ${file_path}`);
  }

  // Delete the file
  rmSync(absolutePath, { force: true });

  return {
    operation: 'delete',
    session_id,
    deleted: file_path,
    absolute_path: absolutePath,
    message: `File deleted successfully`
  };
}
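
// Illustrative example (the session ID and path are placeholders). file_path is resolved
// relative to the session root and rejected if it contains ".." or escapes the session:
//   ccw tool exec session_manager '{"operation":"delete","session_id":"WFS-demo","file_path":".task/T-001.json"}'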

/**
 * Operation: stats
 * Get session statistics (tasks, summaries, plan)
 */
function executeStats(params) {
  const { session_id } = params;

  if (!session_id) {
    throw new Error('Parameter "session_id" is required for stats');
  }

  // Validate session exists
  const session = findSession(session_id);
  if (!session) {
    throw new Error(`Session "${session_id}" not found`);
  }

  const taskDir = join(session.path, '.task');
  const summariesDir = join(session.path, '.summaries');
  const planFile = join(session.path, 'IMPL_PLAN.md');

  // Count tasks by status
  const taskStats = {
    total: 0,
    pending: 0,
    in_progress: 0,
    completed: 0,
    blocked: 0,
    cancelled: 0
  };

  if (existsSync(taskDir)) {
    const taskFiles = readdirSync(taskDir).filter(f => f.endsWith('.json'));
    taskStats.total = taskFiles.length;

    for (const taskFile of taskFiles) {
      try {
        const taskPath = join(taskDir, taskFile);
        const taskData = readJsonFile(taskPath);
        const status = taskData.status || 'unknown';
        if (status in taskStats) {
          taskStats[status]++;
        }
      } catch {
        // Skip invalid task files
      }
    }
  }

  // Count summaries
  let summariesCount = 0;
  if (existsSync(summariesDir)) {
    summariesCount = readdirSync(summariesDir).filter(f => f.endsWith('.md')).length;
  }

  // Check for plan
  const hasPlan = existsSync(planFile);

  return {
    operation: 'stats',
    session_id,
    location: session.location,
    tasks: taskStats,
    summaries: summariesCount,
    has_plan: hasPlan,
    message: `Session statistics retrieved`
  };
}
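
// Illustrative example (the session ID is a placeholder). Counts come from .task/*.json,
// .summaries/*.md, and the presence of IMPL_PLAN.md in the session directory:
//   ccw tool exec session_manager '{"operation":"stats","session_id":"WFS-demo"}'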

// ============================================================
// Main Execute Function
// ============================================================

/**
 * Route to appropriate operation handler
 */
async function execute(params) {
  const { operation } = params;

  if (!operation) {
    throw new Error('Parameter "operation" is required. Valid operations: init, list, read, write, update, archive, mkdir, delete, stats');
  }

  switch (operation) {
    case 'init':
      return executeInit(params);
    case 'list':
      return executeList(params);
    case 'read':
      return executeRead(params);
    case 'write':
      return executeWrite(params);
    case 'update':
      return executeUpdate(params);
    case 'archive':
      return executeArchive(params);
    case 'mkdir':
      return executeMkdir(params);
    case 'delete':
      return executeDelete(params);
    case 'stats':
      return executeStats(params);
    default:
      throw new Error(`Unknown operation: ${operation}. Valid operations: init, list, read, write, update, archive, mkdir, delete, stats`);
  }
}
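
// Minimal programmatic sketch (illustrative; "WFS-demo" is a placeholder session ID):
//   const result = await execute({ operation: 'stats', session_id: 'WFS-demo' });
//   console.log(result.tasks.completed);
// Invalid parameters and missing sessions surface as thrown errors, so callers are
// expected to wrap this in try/catch.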

// ============================================================
// Tool Definition
// ============================================================

export const sessionManagerTool = {
  name: 'session_manager',
  description: `Workflow session lifecycle management tool.

Operations:
- init: Create new session with directory structure
- list: List sessions (active, archived, or both)
- read: Read file content by content_type
- write: Write content to file by content_type
- update: Update existing JSON file (shallow merge)
- archive: Move session from active to archives
- mkdir: Create directories within session
- delete: Delete a file within session
- stats: Get session statistics (tasks, summaries, plan)

Content Types:
session, plan, task, summary, process, chat, brainstorm,
review-dim, review-iter, review-fix, todo, context

Usage:
ccw tool exec session_manager '{"operation":"list"}'
ccw tool exec session_manager '{"operation":"init","session_id":"WFS-test"}'
ccw tool exec session_manager '{"operation":"read","session_id":"WFS-test","content_type":"session"}'
ccw tool exec session_manager '{"operation":"stats","session_id":"WFS-test"}'`,

  parameters: {
    type: 'object',
    properties: {
      operation: {
        type: 'string',
        enum: ['init', 'list', 'read', 'write', 'update', 'archive', 'mkdir', 'delete', 'stats'],
        description: 'Operation to perform'
      },
      session_id: {
        type: 'string',
        description: 'Session identifier (e.g., WFS-my-session). Required for all operations except list.'
      },
      content_type: {
        type: 'string',
        enum: ['session', 'plan', 'task', 'summary', 'process', 'chat', 'brainstorm', 'review-dim', 'review-iter', 'review-fix', 'todo', 'context'],
        description: 'Content type for read/write/update operations'
      },
      content: {
        type: 'object',
        description: 'Content for write/update operations (object for JSON, string for text)'
      },
      path_params: {
        type: 'object',
        description: 'Dynamic path parameters: task_id, filename, dimension, iteration'
      },
      metadata: {
        type: 'object',
        description: 'Session metadata for init operation (project, type, description, etc.)'
      },
      location: {
        type: 'string',
        enum: ['active', 'archived', 'both'],
        description: 'Session location filter for list operation (default: both)'
      },
      include_metadata: {
        type: 'boolean',
        description: 'Include session metadata in list results (default: false)'
      },
      dirs: {
        type: 'array',
        description: 'Directory paths to create for mkdir operation'
      },
      update_status: {
        type: 'boolean',
        description: 'Update session status to completed when archiving (default: true)'
      },
      file_path: {
        type: 'string',
        description: 'Relative file path within session for delete operation'
      }
    },
    required: ['operation']
  },
  execute
};
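
// End-to-end sketch (illustrative; "WFS-demo" is a placeholder session ID):
//   ccw tool exec session_manager '{"operation":"init","session_id":"WFS-demo"}'
//   ccw tool exec session_manager '{"operation":"update","session_id":"WFS-demo","content_type":"session","content":{"status":"in_progress"}}'
//   ccw tool exec session_manager '{"operation":"stats","session_id":"WFS-demo"}'
//   ccw tool exec session_manager '{"operation":"archive","session_id":"WFS-demo"}'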