mirror of
https://github.com/catlog22/Claude-Code-Workflow.git
synced 2026-03-03 15:43:11 +08:00
feat: add configuration backup, sync, and version checker services
- Implemented ConfigBackupService for backing up local configuration files. - Added ConfigSyncService to download configuration files from GitHub with remote-first conflict resolution. - Created VersionChecker to check application version against the latest GitHub release with caching. - Introduced security validation utilities for input validation to prevent common vulnerabilities. - Developed utility functions to start and stop Docusaurus documentation server.
This commit is contained in:
323
ccw/src/core/routes/config-routes.ts
Normal file
323
ccw/src/core/routes/config-routes.ts
Normal file
@@ -0,0 +1,323 @@
|
||||
/**
|
||||
* Config Routes Module
|
||||
* HTTP API endpoints for configuration backup and synchronization from GitHub
|
||||
*
|
||||
* Backup Endpoints:
|
||||
* - POST /api/config/backup - Create backup
|
||||
* - GET /api/config/backups - List backups
|
||||
* - DELETE /api/config/backup/:name - Delete backup
|
||||
* - POST /api/config/backup/:name/restore - Restore backup
|
||||
*
|
||||
* Sync Endpoints:
|
||||
* - POST /api/config/sync - Sync config files from GitHub (remote-first)
|
||||
* - GET /api/config/status - Get sync status (local vs remote comparison)
|
||||
* - GET /api/config/remote - List available remote config files
|
||||
*/
|
||||
|
||||
import type { RouteContext } from './types.js';
|
||||
import { ConfigBackupService } from '../services/config-backup.js';
|
||||
import { getConfigSyncService } from '../services/config-sync.js';
|
||||
import { isValidBackupName, validateConfigDirs, validateGitHubParams } from '../../utils/security-validation.js';
|
||||
|
||||
/**
|
||||
* Handle config routes
|
||||
* @returns true if route was handled, false otherwise
|
||||
*/
|
||||
export async function handleConfigRoutes(ctx: RouteContext): Promise<boolean> {
|
||||
const { pathname, req, res, handlePostRequest, broadcastToClients } = ctx;
|
||||
|
||||
// ========== CREATE BACKUP ==========
|
||||
// POST /api/config/backup
|
||||
if (pathname === '/api/config/backup' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body: unknown) => {
|
||||
try {
|
||||
const { configDirs, backupName } = body as { configDirs?: string[]; backupName?: string };
|
||||
|
||||
// SECURITY: Validate inputs
|
||||
if (backupName && !isValidBackupName(backupName)) {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Invalid backup name. Only alphanumeric, hyphen, underscore, and dot characters are allowed.'
|
||||
};
|
||||
}
|
||||
|
||||
if (configDirs) {
|
||||
try {
|
||||
validateConfigDirs(configDirs);
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
const backupService = new ConfigBackupService();
|
||||
const result = await backupService.createBackup({ configDirs, backupName });
|
||||
|
||||
if (result.success) {
|
||||
// Broadcast backup created event
|
||||
broadcastToClients({
|
||||
type: 'CONFIG_BACKUP_CREATED',
|
||||
payload: {
|
||||
backupPath: result.backupPath,
|
||||
fileCount: result.fileCount,
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (err) {
|
||||
return { success: false, error: (err as Error).message, fileCount: 0 };
|
||||
}
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// ========== LIST BACKUPS ==========
|
||||
// GET /api/config/backups
|
||||
if (pathname === '/api/config/backups' && req.method === 'GET') {
|
||||
try {
|
||||
const backupService = new ConfigBackupService();
|
||||
const backups = await backupService.listBackups();
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, data: backups }));
|
||||
} catch (err) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: (err as Error).message }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// ========== DELETE BACKUP ==========
|
||||
// DELETE /api/config/backup/:name
|
||||
const deleteMatch = pathname.match(/^\/api\/config\/backup\/([^/]+)$/);
|
||||
if (deleteMatch && req.method === 'DELETE') {
|
||||
const backupName = deleteMatch[1];
|
||||
|
||||
// SECURITY: Validate backup name to prevent path traversal
|
||||
if (!isValidBackupName(backupName)) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
success: false,
|
||||
error: 'Invalid backup name. Only alphanumeric, hyphen, underscore, and dot characters are allowed.'
|
||||
}));
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
const backupService = new ConfigBackupService();
|
||||
const result = await backupService.deleteBackup(backupName);
|
||||
|
||||
if (result.success) {
|
||||
// Broadcast backup deleted event
|
||||
broadcastToClients({
|
||||
type: 'CONFIG_BACKUP_DELETED',
|
||||
payload: {
|
||||
backupName,
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
});
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
} else {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
}
|
||||
} catch (err) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: (err as Error).message }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// ========== RESTORE BACKUP ==========
|
||||
// POST /api/config/backup/:name/restore
|
||||
const restoreMatch = pathname.match(/^\/api\/config\/backup\/([^/]+)\/restore$/);
|
||||
if (restoreMatch && req.method === 'POST') {
|
||||
const backupName = restoreMatch[1];
|
||||
|
||||
// SECURITY: Validate backup name to prevent path traversal
|
||||
if (!isValidBackupName(backupName)) {
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
success: false,
|
||||
error: 'Invalid backup name. Only alphanumeric, hyphen, underscore, and dot characters are allowed.'
|
||||
}));
|
||||
return true;
|
||||
}
|
||||
|
||||
try {
|
||||
const backupService = new ConfigBackupService();
|
||||
const result = await backupService.restoreBackup(backupName);
|
||||
|
||||
if (result.success) {
|
||||
// Broadcast backup restored event
|
||||
broadcastToClients({
|
||||
type: 'CONFIG_BACKUP_RESTORED',
|
||||
payload: {
|
||||
backupName,
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
});
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
} else {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify(result));
|
||||
}
|
||||
} catch (err) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: (err as Error).message }));
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// ========== SYNC CONFIG FROM GITHUB ==========
|
||||
// POST /api/config/sync - Sync config files from GitHub (remote-first)
|
||||
if (pathname === '/api/config/sync' && req.method === 'POST') {
|
||||
handlePostRequest(req, res, async (body) => {
|
||||
const { owner, repo, branch, configDirs, baseDir, overwrite } = body as {
|
||||
owner?: string;
|
||||
repo?: string;
|
||||
branch?: string;
|
||||
configDirs?: string[];
|
||||
baseDir?: string;
|
||||
overwrite?: boolean;
|
||||
};
|
||||
|
||||
// SECURITY: Validate GitHub parameters (SSRF protection)
|
||||
try {
|
||||
validateGitHubParams({ owner, repo, branch });
|
||||
|
||||
// Validate config directories (path traversal protection)
|
||||
if (configDirs) {
|
||||
validateConfigDirs(configDirs);
|
||||
}
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : String(error)
|
||||
};
|
||||
}
|
||||
|
||||
const syncService = getConfigSyncService();
|
||||
const result = await syncService.syncConfig({
|
||||
owner,
|
||||
repo,
|
||||
branch,
|
||||
configDirs,
|
||||
baseDir,
|
||||
overwrite: overwrite !== false, // default true
|
||||
});
|
||||
|
||||
// Broadcast to connected dashboard clients on success
|
||||
if (result.success && broadcastToClients) {
|
||||
broadcastToClients({
|
||||
type: 'CONFIG_SYNCED',
|
||||
payload: {
|
||||
syncedFiles: result.syncedFiles,
|
||||
skippedFiles: result.skippedFiles,
|
||||
timestamp: new Date().toISOString(),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
// ========== GET SYNC STATUS ==========
|
||||
// GET /api/config/status - Get sync status (local vs remote comparison)
|
||||
if (pathname === '/api/config/status' && req.method === 'GET') {
|
||||
try {
|
||||
const url = ctx.url;
|
||||
const owner = url.searchParams.get('owner') || undefined;
|
||||
const repo = url.searchParams.get('repo') || undefined;
|
||||
const branch = url.searchParams.get('branch') || undefined;
|
||||
const configDirsParam = url.searchParams.get('configDirs');
|
||||
const configDirs = configDirsParam ? configDirsParam.split(',') : undefined;
|
||||
const baseDir = url.searchParams.get('baseDir') || undefined;
|
||||
|
||||
// SECURITY: Validate inputs
|
||||
validateGitHubParams({ owner, repo, branch });
|
||||
if (configDirs) {
|
||||
validateConfigDirs(configDirs);
|
||||
}
|
||||
|
||||
const syncService = getConfigSyncService();
|
||||
const status = await syncService.getSyncStatus({
|
||||
owner,
|
||||
repo,
|
||||
branch,
|
||||
configDirs,
|
||||
baseDir,
|
||||
});
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
success: true,
|
||||
data: status,
|
||||
timestamp: new Date().toISOString(),
|
||||
}));
|
||||
return true;
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
success: false,
|
||||
error: message,
|
||||
}));
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
// ========== LIST REMOTE CONFIG FILES ==========
|
||||
// GET /api/config/remote - List available remote config files
|
||||
if (pathname === '/api/config/remote' && req.method === 'GET') {
|
||||
try {
|
||||
const url = ctx.url;
|
||||
const owner = url.searchParams.get('owner') || undefined;
|
||||
const repo = url.searchParams.get('repo') || undefined;
|
||||
const branch = url.searchParams.get('branch') || undefined;
|
||||
const configDir = url.searchParams.get('configDir') || '.claude';
|
||||
|
||||
// SECURITY: Validate inputs
|
||||
validateGitHubParams({ owner, repo, branch });
|
||||
validateConfigDirs([configDir]); // Single dir validation
|
||||
|
||||
const syncService = getConfigSyncService();
|
||||
const files = await syncService.listRemoteFiles(configDir, {
|
||||
owner,
|
||||
repo,
|
||||
branch,
|
||||
});
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
success: true,
|
||||
data: {
|
||||
configDir,
|
||||
files,
|
||||
},
|
||||
timestamp: new Date().toISOString(),
|
||||
}));
|
||||
return true;
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
res.writeHead(400, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({
|
||||
success: false,
|
||||
error: message,
|
||||
}));
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
@@ -24,6 +24,9 @@
|
||||
* - POST /api/orchestrator/templates/install - Install template from URL or GitHub
|
||||
* - DELETE /api/orchestrator/templates/:id - Delete local template
|
||||
* - POST /api/orchestrator/templates/export - Export flow as template
|
||||
*
|
||||
* Configuration Endpoints:
|
||||
* - GET /api/config/version - Check application version against GitHub
|
||||
*/
|
||||
|
||||
import { join, dirname } from 'path';
|
||||
@@ -1732,5 +1735,24 @@ export async function handleOrchestratorRoutes(ctx: RouteContext): Promise<boole
|
||||
}
|
||||
}
|
||||
|
||||
// ==== VERSION CHECK ====
|
||||
// GET /api/config/version
|
||||
// Check application version against GitHub latest release
|
||||
if (pathname === '/api/config/version' && req.method === 'GET') {
|
||||
try {
|
||||
const { VersionChecker } = await import('../services/version-checker.js');
|
||||
const checker = new VersionChecker();
|
||||
const result = await checker.checkVersion();
|
||||
|
||||
res.writeHead(200, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: true, data: result }));
|
||||
return true;
|
||||
} catch (error: any) {
|
||||
res.writeHead(500, { 'Content-Type': 'application/json' });
|
||||
res.end(JSON.stringify({ success: false, error: error.message }));
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -36,6 +36,7 @@ import { handleTestLoopRoutes } from './routes/test-loop-routes.js';
|
||||
import { handleTaskRoutes } from './routes/task-routes.js';
|
||||
import { handleDashboardRoutes } from './routes/dashboard-routes.js';
|
||||
import { handleOrchestratorRoutes } from './routes/orchestrator-routes.js';
|
||||
import { handleConfigRoutes } from './routes/config-routes.js';
|
||||
|
||||
// Import WebSocket handling
|
||||
import { handleWebSocketUpgrade, broadcastToClients, extractSessionIdFromPath } from './websocket.js';
|
||||
@@ -452,6 +453,7 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
|
||||
console.log(`[Server] Frontend mode: ${frontend}`);
|
||||
if (frontend === 'react' || frontend === 'both') {
|
||||
console.log(`[Server] React proxy configured: /react/* -> http://localhost:${reactPort}`);
|
||||
console.log(`[Server] Docs proxy configured: /docs/* -> http://localhost:3001`);
|
||||
}
|
||||
|
||||
const tokenManager = getTokenManager();
|
||||
@@ -653,6 +655,11 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
|
||||
if (await handleOrchestratorRoutes(routeContext)) return;
|
||||
}
|
||||
|
||||
// Config routes (/api/config/*)
|
||||
if (pathname.startsWith('/api/config/')) {
|
||||
if (await handleConfigRoutes(routeContext)) return;
|
||||
}
|
||||
|
||||
// Loop V2 routes (/api/loops/v2/*) - must be checked before v1
|
||||
if (pathname.startsWith('/api/loops/v2')) {
|
||||
if (await handleLoopV2Routes(routeContext)) return;
|
||||
@@ -820,6 +827,69 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
|
||||
}
|
||||
}
|
||||
|
||||
// Docs site proxy - proxy requests to Docusaurus dev server (port 3001)
|
||||
// Redirect /docs to /docs/ to match Docusaurus baseUrl
|
||||
if (pathname === '/docs') {
|
||||
res.writeHead(302, { 'Location': `/docs/${url.search}` });
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
// Proxy /docs/* requests to Docusaurus
|
||||
if (pathname.startsWith('/docs/')) {
|
||||
const docsPort = 3001;
|
||||
// Preserve the /docs prefix when forwarding to Docusaurus
|
||||
const docsUrl = `http://localhost:${docsPort}${pathname}${url.search}`;
|
||||
|
||||
console.log(`[Docs Proxy] Proxying ${pathname} -> ${docsUrl}`);
|
||||
|
||||
try {
|
||||
// Convert headers to plain object for fetch
|
||||
const proxyHeaders: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(req.headers)) {
|
||||
if (typeof value === 'string') {
|
||||
proxyHeaders[key] = value;
|
||||
} else if (Array.isArray(value)) {
|
||||
proxyHeaders[key] = value.join(', ');
|
||||
}
|
||||
}
|
||||
proxyHeaders['host'] = `localhost:${docsPort}`;
|
||||
|
||||
const docsResponse = await fetch(docsUrl, {
|
||||
method: req.method,
|
||||
headers: proxyHeaders,
|
||||
body: req.method !== 'GET' && req.method !== 'HEAD' ? await readRequestBody(req) : undefined,
|
||||
});
|
||||
|
||||
const contentType = docsResponse.headers.get('content-type') || 'text/html';
|
||||
const body = await docsResponse.text();
|
||||
|
||||
// Forward response headers
|
||||
const responseHeaders: Record<string, string> = {
|
||||
'Content-Type': contentType,
|
||||
'Cache-Control': 'no-cache',
|
||||
};
|
||||
|
||||
// Forward Set-Cookie headers if present
|
||||
const setCookieHeaders = docsResponse.headers.get('set-cookie');
|
||||
if (setCookieHeaders) {
|
||||
responseHeaders['Set-Cookie'] = setCookieHeaders;
|
||||
}
|
||||
|
||||
console.log(`[Docs Proxy] Response ${docsResponse.status}: ${contentType}`);
|
||||
|
||||
res.writeHead(docsResponse.status, responseHeaders);
|
||||
res.end(body);
|
||||
return;
|
||||
} catch (err) {
|
||||
console.error(`[Docs Proxy] Failed to proxy to ${docsUrl}:`, err);
|
||||
console.error(`[Docs Proxy] Error details:`, (err as Error).message);
|
||||
res.writeHead(502, { 'Content-Type': 'text/plain' });
|
||||
res.end(`Bad Gateway: Docs site not available at ${docsUrl}\nMake sure the Docusaurus server is running on port ${docsPort}.\nError: ${(err as Error).message}`);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Root path - serve JS frontend HTML (default or both mode)
|
||||
if (pathname === '/' || pathname === '/index.html') {
|
||||
const html = generateServerDashboard(initialPath);
|
||||
|
||||
212
ccw/src/core/services/config-backup.ts
Normal file
212
ccw/src/core/services/config-backup.ts
Normal file
@@ -0,0 +1,212 @@
|
||||
/**
|
||||
* Config Backup Service
|
||||
* Handles backup of local configuration files (.claude, .codex, .gemini, .qwen)
|
||||
*/
|
||||
|
||||
import { mkdir, readdir, stat, copyFile, rm, access } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { homedir } from 'os';
|
||||
|
||||
export interface BackupOptions {
|
||||
/** Configuration directories to backup (default: ['.claude']) */
|
||||
configDirs?: string[];
|
||||
/** Custom backup name (default: auto-generated timestamp) */
|
||||
backupName?: string;
|
||||
}
|
||||
|
||||
export interface BackupResult {
|
||||
success: boolean;
|
||||
backupPath?: string;
|
||||
fileCount: number;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
export interface BackupInfo {
|
||||
name: string;
|
||||
path: string;
|
||||
createdAt: Date;
|
||||
fileCount: number;
|
||||
}
|
||||
|
||||
export class ConfigBackupService {
|
||||
private ccwDir: string;
|
||||
private backupDir: string;
|
||||
|
||||
constructor() {
|
||||
this.ccwDir = join(homedir(), '.ccw');
|
||||
this.backupDir = join(this.ccwDir, 'backups');
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a backup of configuration directories
|
||||
* @param options - Backup options
|
||||
* @returns Backup result with path and file count
|
||||
*/
|
||||
async createBackup(options: BackupOptions = {}): Promise<BackupResult> {
|
||||
const { configDirs = ['.claude'], backupName } = options;
|
||||
|
||||
try {
|
||||
// Create backup directory
|
||||
await mkdir(this.backupDir, { recursive: true });
|
||||
|
||||
// Generate backup name
|
||||
const timestamp = new Date().toISOString().replace(/[:.]/g, '-').substring(0, 19);
|
||||
const backupNameFinal = backupName || `backup-${timestamp}`;
|
||||
const backupPath = join(this.backupDir, backupNameFinal);
|
||||
|
||||
await mkdir(backupPath, { recursive: true });
|
||||
|
||||
let fileCount = 0;
|
||||
|
||||
// Backup each config directory
|
||||
for (const configDir of configDirs) {
|
||||
const sourcePath = join(this.ccwDir, configDir);
|
||||
const targetPath = join(backupPath, configDir);
|
||||
|
||||
// Check if source exists
|
||||
try {
|
||||
await access(sourcePath);
|
||||
} catch {
|
||||
continue; // Skip if doesn't exist
|
||||
}
|
||||
|
||||
// Copy directory recursively
|
||||
await this.copyDirectory(sourcePath, targetPath);
|
||||
|
||||
// Count files
|
||||
const files = await this.countFiles(targetPath);
|
||||
fileCount += files;
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
backupPath,
|
||||
fileCount
|
||||
};
|
||||
} catch (error: unknown) {
|
||||
return {
|
||||
success: false,
|
||||
error: (error as Error).message,
|
||||
fileCount: 0
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all available backups
|
||||
* @returns Array of backup information sorted by creation date (newest first)
|
||||
*/
|
||||
async listBackups(): Promise<BackupInfo[]> {
|
||||
try {
|
||||
const entries = await readdir(this.backupDir, { withFileTypes: true });
|
||||
const backups: BackupInfo[] = [];
|
||||
|
||||
for (const entry of entries) {
|
||||
if (entry.isDirectory()) {
|
||||
const backupPath = join(this.backupDir, entry.name);
|
||||
const stats = await stat(backupPath);
|
||||
const fileCount = await this.countFiles(backupPath);
|
||||
|
||||
backups.push({
|
||||
name: entry.name,
|
||||
path: backupPath,
|
||||
createdAt: stats.mtime,
|
||||
fileCount
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return backups.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a specific backup
|
||||
* @param backupName - Name of the backup to delete
|
||||
* @returns Success status
|
||||
*/
|
||||
async deleteBackup(backupName: string): Promise<{ success: boolean; error?: string }> {
|
||||
try {
|
||||
const backupPath = join(this.backupDir, backupName);
|
||||
await rm(backupPath, { recursive: true, force: true });
|
||||
return { success: true };
|
||||
} catch (error: unknown) {
|
||||
return {
|
||||
success: false,
|
||||
error: (error as Error).message
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Restore a backup to the original location
|
||||
* @param backupName - Name of the backup to restore
|
||||
* @returns Success status
|
||||
*/
|
||||
async restoreBackup(backupName: string): Promise<{ success: boolean; error?: string }> {
|
||||
try {
|
||||
const backupPath = join(this.backupDir, backupName);
|
||||
const entries = await readdir(backupPath, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
if (entry.isDirectory()) {
|
||||
const sourcePath = join(backupPath, entry.name);
|
||||
const targetPath = join(this.ccwDir, entry.name);
|
||||
|
||||
// Copy directory back to original location
|
||||
await this.copyDirectory(sourcePath, targetPath);
|
||||
}
|
||||
}
|
||||
|
||||
return { success: true };
|
||||
} catch (error: unknown) {
|
||||
return {
|
||||
success: false,
|
||||
error: (error as Error).message
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy directory recursively
|
||||
* @param src - Source directory path
|
||||
* @param dest - Destination directory path
|
||||
*/
|
||||
private async copyDirectory(src: string, dest: string): Promise<void> {
|
||||
await mkdir(dest, { recursive: true });
|
||||
const entries = await readdir(src, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
const srcPath = join(src, entry.name);
|
||||
const destPath = join(dest, entry.name);
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await this.copyDirectory(srcPath, destPath);
|
||||
} else {
|
||||
await copyFile(srcPath, destPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Count files in a directory recursively
|
||||
* @param dir - Directory path
|
||||
* @returns Number of files
|
||||
*/
|
||||
private async countFiles(dir: string): Promise<number> {
|
||||
let count = 0;
|
||||
const entries = await readdir(dir, { withFileTypes: true });
|
||||
|
||||
for (const entry of entries) {
|
||||
if (entry.isDirectory()) {
|
||||
count += await this.countFiles(join(dir, entry.name));
|
||||
} else {
|
||||
count++;
|
||||
}
|
||||
}
|
||||
|
||||
return count;
|
||||
}
|
||||
}
|
||||
330
ccw/src/core/services/config-sync.ts
Normal file
330
ccw/src/core/services/config-sync.ts
Normal file
@@ -0,0 +1,330 @@
|
||||
/**
|
||||
* Config Sync Service
|
||||
* Downloads configuration files from GitHub using remote-first conflict resolution
|
||||
*/
|
||||
|
||||
import { promises as fs } from 'fs';
|
||||
import { join } from 'path';
|
||||
import { homedir } from 'os';
|
||||
import {
|
||||
validateConfigDirs,
|
||||
validateGitHubParams,
|
||||
VALID_CONFIG_DIRS,
|
||||
type ValidConfigDir
|
||||
} from '../../utils/security-validation.js';
|
||||
|
||||
/**
|
||||
* Default GitHub repository configuration for remote config sync
|
||||
*/
|
||||
const DEFAULT_GITHUB_CONFIG = {
|
||||
owner: 'dyw0830',
|
||||
repo: 'ccw',
|
||||
branch: 'main',
|
||||
};
|
||||
|
||||
/**
|
||||
* Default config directories to sync
|
||||
* Uses whitelist from security-validation
|
||||
*/
|
||||
const DEFAULT_CONFIG_DIRS: ValidConfigDir[] = ['.claude'];
|
||||
|
||||
/**
|
||||
* Common configuration files to sync from each config directory
|
||||
*/
|
||||
const COMMON_CONFIG_FILES = [
|
||||
'settings.json',
|
||||
'config.json',
|
||||
'CLAUDE.md',
|
||||
'cli-tools.json',
|
||||
'guidelines.json',
|
||||
'prompts.json',
|
||||
];
|
||||
|
||||
/**
|
||||
* Sync result interface
|
||||
*/
|
||||
export interface ConfigSyncResult {
|
||||
success: boolean;
|
||||
syncedFiles: string[];
|
||||
errors: string[];
|
||||
skippedFiles: string[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Config sync options interface
|
||||
*/
|
||||
export interface ConfigSyncOptions {
|
||||
/** GitHub repository owner (default: 'dyw0830') */
|
||||
owner?: string;
|
||||
/** GitHub repository name (default: 'ccw') */
|
||||
repo?: string;
|
||||
/** Git branch (default: 'main') */
|
||||
branch?: string;
|
||||
/** Config directories to sync (default: ['.claude']) */
|
||||
configDirs?: string[];
|
||||
/** Target base directory (default: ~/.ccw) */
|
||||
baseDir?: string;
|
||||
/** Remote-first: overwrite local files (default: true) */
|
||||
overwrite?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Config Sync Service
|
||||
* Downloads configuration files from GitHub with remote-first conflict resolution
|
||||
*/
|
||||
export class ConfigSyncService {
|
||||
/**
|
||||
* Sync configuration files from GitHub
|
||||
* @param options - Sync options
|
||||
* @returns Sync result with status, files synced, and any errors
|
||||
*/
|
||||
async syncConfig(options: ConfigSyncOptions = {}): Promise<ConfigSyncResult> {
|
||||
const {
|
||||
owner = DEFAULT_GITHUB_CONFIG.owner,
|
||||
repo = DEFAULT_GITHUB_CONFIG.repo,
|
||||
branch = DEFAULT_GITHUB_CONFIG.branch,
|
||||
configDirs = DEFAULT_CONFIG_DIRS,
|
||||
baseDir = join(homedir(), '.ccw'),
|
||||
overwrite = true,
|
||||
} = options;
|
||||
|
||||
// SECURITY: Validate all inputs before processing
|
||||
try {
|
||||
// Validate GitHub parameters (SSRF protection)
|
||||
validateGitHubParams({ owner, repo, branch });
|
||||
|
||||
// Validate config directories (path traversal protection)
|
||||
validateConfigDirs(configDirs);
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
syncedFiles: [],
|
||||
errors: [error instanceof Error ? error.message : String(error)],
|
||||
skippedFiles: [],
|
||||
};
|
||||
}
|
||||
|
||||
const results: ConfigSyncResult = {
|
||||
success: true,
|
||||
syncedFiles: [],
|
||||
errors: [],
|
||||
skippedFiles: [],
|
||||
};
|
||||
|
||||
for (const configDir of configDirs) {
|
||||
try {
|
||||
const dirResult = await this.syncConfigDirectory(configDir, {
|
||||
owner,
|
||||
repo,
|
||||
branch,
|
||||
baseDir,
|
||||
overwrite,
|
||||
});
|
||||
|
||||
results.syncedFiles.push(...dirResult.syncedFiles);
|
||||
results.errors.push(...dirResult.errors);
|
||||
results.skippedFiles.push(...dirResult.skippedFiles);
|
||||
|
||||
if (!dirResult.success) {
|
||||
results.success = false;
|
||||
}
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
results.errors.push(`${configDir}: ${message}`);
|
||||
results.success = false;
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync a single config directory
|
||||
*/
|
||||
private async syncConfigDirectory(
|
||||
configDir: string,
|
||||
options: {
|
||||
owner: string;
|
||||
repo: string;
|
||||
branch: string;
|
||||
baseDir: string;
|
||||
overwrite: boolean;
|
||||
}
|
||||
): Promise<ConfigSyncResult> {
|
||||
const { owner, repo, branch, baseDir, overwrite } = options;
|
||||
const result: ConfigSyncResult = {
|
||||
success: true,
|
||||
syncedFiles: [],
|
||||
errors: [],
|
||||
skippedFiles: [],
|
||||
};
|
||||
|
||||
const localPath = join(baseDir, configDir);
|
||||
const baseUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branch}/${configDir}`;
|
||||
|
||||
// Ensure local directory exists
|
||||
await fs.mkdir(localPath, { recursive: true });
|
||||
|
||||
// Try to sync each common config file
|
||||
for (const file of COMMON_CONFIG_FILES) {
|
||||
const fileUrl = `${baseUrl}/${file}`;
|
||||
const localFilePath = join(localPath, file);
|
||||
|
||||
try {
|
||||
// Check if remote file exists
|
||||
const response = await fetch(fileUrl);
|
||||
if (!response.ok) {
|
||||
// File doesn't exist on remote, skip
|
||||
continue;
|
||||
}
|
||||
|
||||
const content = await response.text();
|
||||
|
||||
// Check if local file exists
|
||||
const localExists = await this.fileExists(localFilePath);
|
||||
|
||||
if (localExists && !overwrite) {
|
||||
result.skippedFiles.push(localFilePath);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Write remote content to local file (remote-first)
|
||||
await fs.writeFile(localFilePath, content, 'utf-8');
|
||||
result.syncedFiles.push(localFilePath);
|
||||
} catch (error: unknown) {
|
||||
const message = error instanceof Error ? error.message : String(error);
|
||||
result.errors.push(`${file}: ${message}`);
|
||||
result.success = false;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a file exists
|
||||
*/
|
||||
private async fileExists(filePath: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(filePath);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List available config files from remote directory
|
||||
* @param configDir - Config directory name
|
||||
* @param options - GitHub options
|
||||
* @returns List of available files
|
||||
*/
|
||||
async listRemoteFiles(
|
||||
configDir: string,
|
||||
options: Partial<Pick<ConfigSyncOptions, 'owner' | 'repo' | 'branch'>> = {}
|
||||
): Promise<string[]> {
|
||||
const {
|
||||
owner = DEFAULT_GITHUB_CONFIG.owner,
|
||||
repo = DEFAULT_GITHUB_CONFIG.repo,
|
||||
branch = DEFAULT_GITHUB_CONFIG.branch,
|
||||
} = options;
|
||||
|
||||
const baseUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branch}/${configDir}`;
|
||||
const availableFiles: string[] = [];
|
||||
|
||||
for (const file of COMMON_CONFIG_FILES) {
|
||||
try {
|
||||
const response = await fetch(`${baseUrl}/${file}`);
|
||||
if (response.ok) {
|
||||
availableFiles.push(file);
|
||||
}
|
||||
} catch {
|
||||
// File doesn't exist or network error, skip
|
||||
}
|
||||
}
|
||||
|
||||
return availableFiles;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get sync status - compare local and remote files
|
||||
* @param options - Sync options
|
||||
* @returns Status comparison result
|
||||
*/
|
||||
async getSyncStatus(options: ConfigSyncOptions = {}): Promise<{
|
||||
localOnly: string[];
|
||||
remoteOnly: string[];
|
||||
synced: string[];
|
||||
}> {
|
||||
const {
|
||||
owner = DEFAULT_GITHUB_CONFIG.owner,
|
||||
repo = DEFAULT_GITHUB_CONFIG.repo,
|
||||
branch = DEFAULT_GITHUB_CONFIG.branch,
|
||||
configDirs = DEFAULT_CONFIG_DIRS,
|
||||
baseDir = join(homedir(), '.ccw'),
|
||||
} = options;
|
||||
|
||||
// SECURITY: Validate inputs
|
||||
try {
|
||||
validateGitHubParams({ owner, repo, branch });
|
||||
validateConfigDirs(configDirs);
|
||||
} catch (error) {
|
||||
throw error; // Re-throw validation errors
|
||||
}
|
||||
|
||||
const status = {
|
||||
localOnly: [] as string[],
|
||||
remoteOnly: [] as string[],
|
||||
synced: [] as string[],
|
||||
};
|
||||
|
||||
for (const configDir of configDirs) {
|
||||
const localPath = join(baseDir, configDir);
|
||||
const baseUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branch}/${configDir}`;
|
||||
|
||||
const remoteFiles = await this.listRemoteFiles(configDir, { owner, repo, branch });
|
||||
const localFiles = await this.listLocalFiles(localPath);
|
||||
|
||||
for (const file of remoteFiles) {
|
||||
const localFilePath = join(localPath, file);
|
||||
if (localFiles.includes(file)) {
|
||||
status.synced.push(localFilePath);
|
||||
} else {
|
||||
status.remoteOnly.push(localFilePath);
|
||||
}
|
||||
}
|
||||
|
||||
for (const file of localFiles) {
|
||||
if (!remoteFiles.includes(file)) {
|
||||
status.localOnly.push(join(localPath, file));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return status;
|
||||
}
|
||||
|
||||
/**
|
||||
* List files in a local directory
|
||||
*/
|
||||
private async listLocalFiles(dirPath: string): Promise<string[]> {
|
||||
try {
|
||||
const files = await fs.readdir(dirPath);
|
||||
return files.filter(file => COMMON_CONFIG_FILES.includes(file));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get singleton instance of ConfigSyncService
|
||||
*/
|
||||
let configSyncServiceInstance: ConfigSyncService | null = null;
|
||||
|
||||
export function getConfigSyncService(): ConfigSyncService {
|
||||
if (!configSyncServiceInstance) {
|
||||
configSyncServiceInstance = new ConfigSyncService();
|
||||
}
|
||||
return configSyncServiceInstance;
|
||||
}
|
||||
ccw/src/core/services/version-checker.ts — new file, 174 lines (@@ -0,0 +1,174 @@)
|
||||
/**
|
||||
* Version Checker Service
|
||||
* Checks application version against GitHub latest release
|
||||
* Uses caching to avoid excessive API calls
|
||||
*/
|
||||
|
||||
import { readFile } from 'fs/promises';
|
||||
import { join } from 'path';
|
||||
import { existsSync } from 'fs';
|
||||
|
||||
/**
 * Version check result
 * Outcome of comparing the locally installed version with the latest
 * GitHub release.
 */
export interface VersionCheckResult {
  /** Version read from the local package.json ('0.0.0' fallback when none is found). */
  currentVersion: string;
  /** Latest release tag from GitHub, with any leading 'v' prefix stripped. */
  latestVersion: string;
  /** True when currentVersion compares semantically older than latestVersion. */
  updateAvailable: boolean;
  /** Release notes, if any — not populated by the code visible in this file. */
  changelog?: string;
}
|
||||
|
||||
/**
 * Version cache entry
 * A check result paired with the epoch-millisecond time it was recorded,
 * used to enforce the cache TTL.
 */
interface CacheEntry {
  /** The cached version check result. */
  data: VersionCheckResult;
  /** Date.now() timestamp at which the result was cached. */
  timestamp: number;
}
|
||||
|
||||
/**
|
||||
* Version Checker Service
|
||||
* Checks for updates by comparing local version with GitHub releases
|
||||
*/
|
||||
export class VersionChecker {
|
||||
private cache: CacheEntry | null = null;
|
||||
private readonly CACHE_TTL = 5 * 60 * 1000; // 5 minutes
|
||||
private readonly GITHUB_OWNER = 'dyw0830';
|
||||
private readonly GITHUB_REPO = 'ccw';
|
||||
private readonly GITHUB_API_URL = `https://api.github.com/repos/dyw0830/ccw/releases/latest`;
|
||||
|
||||
/**
|
||||
* Check for updates
|
||||
* Returns cached result if within TTL
|
||||
*/
|
||||
async checkVersion(): Promise<VersionCheckResult> {
|
||||
// Check cache first
|
||||
if (this.cache && Date.now() - this.cache.timestamp < this.CACHE_TTL) {
|
||||
return this.cache.data;
|
||||
}
|
||||
|
||||
// Get versions
|
||||
const currentVersion = await this.getLocalVersion();
|
||||
const latestVersion = await this.getLatestVersionFromGitHub();
|
||||
|
||||
const result: VersionCheckResult = {
|
||||
currentVersion,
|
||||
latestVersion,
|
||||
updateAvailable: this.compareVersions(currentVersion, latestVersion) < 0
|
||||
};
|
||||
|
||||
// Cache result
|
||||
this.cache = { data: result, timestamp: Date.now() };
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get local version from package.json
|
||||
* Searches in monorepo root and ccw package directories
|
||||
*/
|
||||
private async getLocalVersion(): Promise<string> {
|
||||
// Try to find package.json with actual CCW version
|
||||
const possiblePaths = [
|
||||
join(process.cwd(), 'package.json'), // Current directory
|
||||
join(process.cwd(), 'ccw', 'package.json'), // ccw subdirectory
|
||||
join(__dirname, '..', '..', '..', '..', 'package.json'), // From src/core/services -> monorepo root
|
||||
];
|
||||
|
||||
for (const pkgPath of possiblePaths) {
|
||||
if (existsSync(pkgPath)) {
|
||||
try {
|
||||
const content = await readFile(pkgPath, 'utf-8');
|
||||
const pkg = JSON.parse(content);
|
||||
if (pkg.version && typeof pkg.version === 'string') {
|
||||
return pkg.version;
|
||||
}
|
||||
} catch {
|
||||
// Continue to next path
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback to a default version if no package.json found
|
||||
return '0.0.0';
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch latest version from GitHub Releases API
|
||||
* Returns cached data if available even if expired, on error
|
||||
*/
|
||||
private async getLatestVersionFromGitHub(): Promise<string> {
|
||||
try {
|
||||
// Create abort controller for timeout
|
||||
const controller = new AbortController();
|
||||
const timeoutId = setTimeout(() => controller.abort(), 10000); // 10 second timeout
|
||||
|
||||
const response = await fetch(this.GITHUB_API_URL, {
|
||||
headers: {
|
||||
'Accept': 'application/vnd.github.v3+json',
|
||||
'User-Agent': 'CCW-VersionChecker', // REQUIRED by GitHub API
|
||||
},
|
||||
signal: controller.signal,
|
||||
});
|
||||
|
||||
clearTimeout(timeoutId);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
|
||||
}
|
||||
|
||||
// Validate response structure
|
||||
const data = await response.json() as { tag_name?: string };
|
||||
|
||||
if (!data || typeof data !== 'object') {
|
||||
throw new Error('Invalid GitHub API response format');
|
||||
}
|
||||
|
||||
if (!data.tag_name || typeof data.tag_name !== 'string') {
|
||||
throw new Error('Invalid tag_name in GitHub response');
|
||||
}
|
||||
|
||||
// Extract version from tag_name (remove 'v' prefix if present)
|
||||
const tagName = data.tag_name;
|
||||
return tagName.startsWith('v') ? tagName.substring(1) : tagName;
|
||||
} catch (error) {
|
||||
// Handle abort (timeout)
|
||||
if (error instanceof Error && error.name === 'AbortError') {
|
||||
throw new Error('GitHub API request timeout (10s)');
|
||||
}
|
||||
|
||||
// Return cached data if available, even if expired
|
||||
if (this.cache) {
|
||||
console.warn(`[VersionChecker] Using cached version due to error: ${(error as Error).message}`);
|
||||
return this.cache.data.latestVersion;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Compare two semantic version strings
|
||||
* Returns: -1 if v1 < v2, 0 if v1 == v2, 1 if v1 > v2
|
||||
*/
|
||||
private compareVersions(v1: string, v2: string): number {
|
||||
// Parse semver versions (major.minor.patch)
|
||||
const parts1 = v1.split('.').map((p) => (parseInt(p, 10) || 0));
|
||||
const parts2 = v2.split('.').map((p) => (parseInt(p, 10) || 0));
|
||||
|
||||
// Ensure we have at least 3 parts for comparison
|
||||
while (parts1.length < 3) parts1.push(0);
|
||||
while (parts2.length < 3) parts2.push(0);
|
||||
|
||||
for (let i = 0; i < 3; i++) {
|
||||
if (parts1[i] < parts2[i]) return -1;
|
||||
if (parts1[i] > parts2[i]) return 1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear the version cache (useful for testing or manual refresh)
|
||||
*/
|
||||
clearCache(): void {
|
||||
this.cache = null;
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user