feat: add configuration backup, sync, and version checker services

- Implemented ConfigBackupService for backing up local configuration files.
- Added ConfigSyncService to download configuration files from GitHub with remote-first conflict resolution.
- Created VersionChecker to check application version against the latest GitHub release with caching.
- Introduced security validation utilities for input validation to prevent common vulnerabilities.
- Developed utility functions to start and stop Docusaurus documentation server.
This commit is contained in:
catlog22
2026-02-05 17:32:31 +08:00
parent 834951a08d
commit 5cfeb59124
265 changed files with 8714 additions and 1408 deletions

View File

@@ -2,6 +2,7 @@ import { startServer } from '../core/server.js';
import { launchBrowser } from '../utils/browser-launcher.js';
import { resolvePath, validatePath } from '../utils/path-resolver.js';
import { startReactFrontend, stopReactFrontend } from '../utils/react-frontend.js';
import { startDocsSite, stopDocsSite } from '../utils/docs-frontend.js';
import chalk from 'chalk';
import type { Server } from 'http';
@@ -53,6 +54,18 @@ export async function serveCommand(options: ServeOptions): Promise<void> {
}
}
// Start Docusaurus docs site if React frontend is enabled
// The docs site is proxied through Vite at /docs endpoint
if (frontend === 'react' || frontend === 'both') {
try {
await startDocsSite(3001);
} catch (error) {
console.log(chalk.yellow(`\n Warning: Failed to start docs site: ${error}`));
console.log(chalk.gray(` The /docs endpoint will not be available.`));
console.log(chalk.gray(` You can start it manually: cd docs-site && npm start\n`));
}
}
try {
// Start server
console.log(chalk.cyan(' Starting server...'));
@@ -78,8 +91,10 @@ export async function serveCommand(options: ServeOptions): Promise<void> {
if (frontend === 'both') {
console.log(chalk.gray(` JS Frontend: ${boundUrl}`));
console.log(chalk.gray(` React Frontend: http://${host}:${reactPort}`));
console.log(chalk.gray(` Docs: http://${host}:${reactPort}/docs/`));
} else if (frontend === 'react') {
console.log(chalk.gray(` React Frontend: http://${host}:${reactPort}`));
console.log(chalk.gray(` Docs: http://${host}:${reactPort}/docs/`));
}
// Open browser
@@ -113,6 +128,7 @@ export async function serveCommand(options: ServeOptions): Promise<void> {
process.on('SIGINT', async () => {
console.log(chalk.yellow('\n Shutting down server...'));
await stopReactFrontend();
await stopDocsSite();
server.close(() => {
console.log(chalk.green(' Server stopped.\n'));
process.exit(0);
@@ -127,6 +143,7 @@ export async function serveCommand(options: ServeOptions): Promise<void> {
console.error(chalk.gray(` Try a different port: ccw serve --port ${port + 1}\n`));
}
await stopReactFrontend();
await stopDocsSite();
process.exit(1);
}
}

View File

@@ -0,0 +1,323 @@
/**
* Config Routes Module
* HTTP API endpoints for configuration backup and synchronization from GitHub
*
* Backup Endpoints:
* - POST /api/config/backup - Create backup
* - GET /api/config/backups - List backups
* - DELETE /api/config/backup/:name - Delete backup
* - POST /api/config/backup/:name/restore - Restore backup
*
* Sync Endpoints:
* - POST /api/config/sync - Sync config files from GitHub (remote-first)
* - GET /api/config/status - Get sync status (local vs remote comparison)
* - GET /api/config/remote - List available remote config files
*/
import type { RouteContext } from './types.js';
import { ConfigBackupService } from '../services/config-backup.js';
import { getConfigSyncService } from '../services/config-sync.js';
import { isValidBackupName, validateConfigDirs, validateGitHubParams } from '../../utils/security-validation.js';
/**
 * Handle config routes
 *
 * Dispatcher for all /api/config/* endpoints: backup management (create,
 * list, delete, restore via ConfigBackupService) and GitHub config sync
 * (sync, status, remote listing via ConfigSyncService). Every user-supplied
 * backup name, config directory, and GitHub parameter is validated before
 * use to block path traversal and SSRF.
 *
 * @returns true if route was handled, false otherwise
 */
export async function handleConfigRoutes(ctx: RouteContext): Promise<boolean> {
  const { pathname, req, res, handlePostRequest, broadcastToClients } = ctx;

  // ========== CREATE BACKUP ==========
  // POST /api/config/backup
  if (pathname === '/api/config/backup' && req.method === 'POST') {
    // handlePostRequest parses the body and writes the callback's return
    // value as the response, so this branch never touches res directly.
    handlePostRequest(req, res, async (body: unknown) => {
      try {
        const { configDirs, backupName } = body as { configDirs?: string[]; backupName?: string };
        // SECURITY: Validate inputs
        if (backupName && !isValidBackupName(backupName)) {
          return {
            success: false,
            error: 'Invalid backup name. Only alphanumeric, hyphen, underscore, and dot characters are allowed.'
          };
        }
        if (configDirs) {
          try {
            // validateConfigDirs throws; convert to a structured error response.
            validateConfigDirs(configDirs);
          } catch (error) {
            return {
              success: false,
              error: error instanceof Error ? error.message : String(error)
            };
          }
        }
        const backupService = new ConfigBackupService();
        const result = await backupService.createBackup({ configDirs, backupName });
        if (result.success) {
          // Broadcast backup created event
          broadcastToClients({
            type: 'CONFIG_BACKUP_CREATED',
            payload: {
              backupPath: result.backupPath,
              fileCount: result.fileCount,
              timestamp: new Date().toISOString()
            }
          });
        }
        return result;
      } catch (err) {
        // fileCount: 0 keeps the error shape consistent with BackupResult.
        return { success: false, error: (err as Error).message, fileCount: 0 };
      }
    });
    return true;
  }

  // ========== LIST BACKUPS ==========
  // GET /api/config/backups
  if (pathname === '/api/config/backups' && req.method === 'GET') {
    try {
      const backupService = new ConfigBackupService();
      const backups = await backupService.listBackups();
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ success: true, data: backups }));
    } catch (err) {
      res.writeHead(500, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ success: false, error: (err as Error).message }));
    }
    return true;
  }

  // ========== DELETE BACKUP ==========
  // DELETE /api/config/backup/:name
  // NOTE(review): assumes pathname is already percent-decoded upstream, so
  // encoded traversal sequences reach isValidBackupName in decoded form — verify.
  const deleteMatch = pathname.match(/^\/api\/config\/backup\/([^/]+)$/);
  if (deleteMatch && req.method === 'DELETE') {
    const backupName = deleteMatch[1];
    // SECURITY: Validate backup name to prevent path traversal
    if (!isValidBackupName(backupName)) {
      res.writeHead(400, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({
        success: false,
        error: 'Invalid backup name. Only alphanumeric, hyphen, underscore, and dot characters are allowed.'
      }));
      return true;
    }
    try {
      const backupService = new ConfigBackupService();
      const result = await backupService.deleteBackup(backupName);
      if (result.success) {
        // Broadcast backup deleted event
        broadcastToClients({
          type: 'CONFIG_BACKUP_DELETED',
          payload: {
            backupName,
            timestamp: new Date().toISOString()
          }
        });
        res.writeHead(200, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify(result));
      } else {
        res.writeHead(500, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify(result));
      }
    } catch (err) {
      res.writeHead(500, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ success: false, error: (err as Error).message }));
    }
    return true;
  }

  // ========== RESTORE BACKUP ==========
  // POST /api/config/backup/:name/restore
  // Mirrors the DELETE handler: validate name, act, broadcast on success.
  const restoreMatch = pathname.match(/^\/api\/config\/backup\/([^/]+)\/restore$/);
  if (restoreMatch && req.method === 'POST') {
    const backupName = restoreMatch[1];
    // SECURITY: Validate backup name to prevent path traversal
    if (!isValidBackupName(backupName)) {
      res.writeHead(400, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({
        success: false,
        error: 'Invalid backup name. Only alphanumeric, hyphen, underscore, and dot characters are allowed.'
      }));
      return true;
    }
    try {
      const backupService = new ConfigBackupService();
      const result = await backupService.restoreBackup(backupName);
      if (result.success) {
        // Broadcast backup restored event
        broadcastToClients({
          type: 'CONFIG_BACKUP_RESTORED',
          payload: {
            backupName,
            timestamp: new Date().toISOString()
          }
        });
        res.writeHead(200, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify(result));
      } else {
        res.writeHead(500, { 'Content-Type': 'application/json' });
        res.end(JSON.stringify(result));
      }
    } catch (err) {
      res.writeHead(500, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({ success: false, error: (err as Error).message }));
    }
    return true;
  }

  // ========== SYNC CONFIG FROM GITHUB ==========
  // POST /api/config/sync - Sync config files from GitHub (remote-first)
  if (pathname === '/api/config/sync' && req.method === 'POST') {
    handlePostRequest(req, res, async (body) => {
      const { owner, repo, branch, configDirs, baseDir, overwrite } = body as {
        owner?: string;
        repo?: string;
        branch?: string;
        configDirs?: string[];
        baseDir?: string;
        overwrite?: boolean;
      };
      // SECURITY: Validate GitHub parameters (SSRF protection)
      try {
        validateGitHubParams({ owner, repo, branch });
        // Validate config directories (path traversal protection)
        if (configDirs) {
          validateConfigDirs(configDirs);
        }
      } catch (error) {
        return {
          success: false,
          error: error instanceof Error ? error.message : String(error)
        };
      }
      const syncService = getConfigSyncService();
      const result = await syncService.syncConfig({
        owner,
        repo,
        branch,
        configDirs,
        baseDir,
        overwrite: overwrite !== false, // default true
      });
      // Broadcast to connected dashboard clients on success
      // NOTE(review): guarded on broadcastToClients here but the backup
      // handlers above call it unconditionally — confirm whether ctx can
      // ever omit it.
      if (result.success && broadcastToClients) {
        broadcastToClients({
          type: 'CONFIG_SYNCED',
          payload: {
            syncedFiles: result.syncedFiles,
            skippedFiles: result.skippedFiles,
            timestamp: new Date().toISOString(),
          },
        });
      }
      return result;
    });
    return true;
  }

  // ========== GET SYNC STATUS ==========
  // GET /api/config/status - Get sync status (local vs remote comparison)
  if (pathname === '/api/config/status' && req.method === 'GET') {
    try {
      const url = ctx.url;
      const owner = url.searchParams.get('owner') || undefined;
      const repo = url.searchParams.get('repo') || undefined;
      const branch = url.searchParams.get('branch') || undefined;
      const configDirsParam = url.searchParams.get('configDirs');
      const configDirs = configDirsParam ? configDirsParam.split(',') : undefined;
      const baseDir = url.searchParams.get('baseDir') || undefined;
      // SECURITY: Validate inputs
      validateGitHubParams({ owner, repo, branch });
      if (configDirs) {
        validateConfigDirs(configDirs);
      }
      const syncService = getConfigSyncService();
      const status = await syncService.getSyncStatus({
        owner,
        repo,
        branch,
        configDirs,
        baseDir,
      });
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({
        success: true,
        data: status,
        timestamp: new Date().toISOString(),
      }));
      return true;
    } catch (error: unknown) {
      // NOTE(review): upstream/network failures also surface as 400 here;
      // consider 502 for non-validation errors.
      const message = error instanceof Error ? error.message : String(error);
      res.writeHead(400, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({
        success: false,
        error: message,
      }));
      return true;
    }
  }

  // ========== LIST REMOTE CONFIG FILES ==========
  // GET /api/config/remote - List available remote config files
  if (pathname === '/api/config/remote' && req.method === 'GET') {
    try {
      const url = ctx.url;
      const owner = url.searchParams.get('owner') || undefined;
      const repo = url.searchParams.get('repo') || undefined;
      const branch = url.searchParams.get('branch') || undefined;
      const configDir = url.searchParams.get('configDir') || '.claude';
      // SECURITY: Validate inputs
      validateGitHubParams({ owner, repo, branch });
      validateConfigDirs([configDir]); // Single dir validation
      const syncService = getConfigSyncService();
      const files = await syncService.listRemoteFiles(configDir, {
        owner,
        repo,
        branch,
      });
      res.writeHead(200, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({
        success: true,
        data: {
          configDir,
          files,
        },
        timestamp: new Date().toISOString(),
      }));
      return true;
    } catch (error: unknown) {
      const message = error instanceof Error ? error.message : String(error);
      res.writeHead(400, { 'Content-Type': 'application/json' });
      res.end(JSON.stringify({
        success: false,
        error: message,
      }));
      return true;
    }
  }

  // Not an /api/config route we know — let the next handler try.
  return false;
}

View File

@@ -24,6 +24,9 @@
* - POST /api/orchestrator/templates/install - Install template from URL or GitHub
* - DELETE /api/orchestrator/templates/:id - Delete local template
* - POST /api/orchestrator/templates/export - Export flow as template
*
* Configuration Endpoints:
* - GET /api/config/version - Check application version against GitHub
*/
import { join, dirname } from 'path';
@@ -1732,5 +1735,24 @@ export async function handleOrchestratorRoutes(ctx: RouteContext): Promise<boole
}
}
// ==== VERSION CHECK ====
// GET /api/config/version
// Check application version against GitHub latest release
if (pathname === '/api/config/version' && req.method === 'GET') {
try {
const { VersionChecker } = await import('../services/version-checker.js');
const checker = new VersionChecker();
const result = await checker.checkVersion();
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ success: true, data: result }));
return true;
} catch (error: any) {
res.writeHead(500, { 'Content-Type': 'application/json' });
res.end(JSON.stringify({ success: false, error: error.message }));
return true;
}
}
return false;
}

View File

@@ -36,6 +36,7 @@ import { handleTestLoopRoutes } from './routes/test-loop-routes.js';
import { handleTaskRoutes } from './routes/task-routes.js';
import { handleDashboardRoutes } from './routes/dashboard-routes.js';
import { handleOrchestratorRoutes } from './routes/orchestrator-routes.js';
import { handleConfigRoutes } from './routes/config-routes.js';
// Import WebSocket handling
import { handleWebSocketUpgrade, broadcastToClients, extractSessionIdFromPath } from './websocket.js';
@@ -452,6 +453,7 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
console.log(`[Server] Frontend mode: ${frontend}`);
if (frontend === 'react' || frontend === 'both') {
console.log(`[Server] React proxy configured: /react/* -> http://localhost:${reactPort}`);
console.log(`[Server] Docs proxy configured: /docs/* -> http://localhost:3001`);
}
const tokenManager = getTokenManager();
@@ -653,6 +655,11 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
if (await handleOrchestratorRoutes(routeContext)) return;
}
// Config routes (/api/config/*)
if (pathname.startsWith('/api/config/')) {
if (await handleConfigRoutes(routeContext)) return;
}
// Loop V2 routes (/api/loops/v2/*) - must be checked before v1
if (pathname.startsWith('/api/loops/v2')) {
if (await handleLoopV2Routes(routeContext)) return;
@@ -820,6 +827,69 @@ export async function startServer(options: ServerOptions = {}): Promise<http.Ser
}
}
// Docs site proxy - proxy requests to Docusaurus dev server (port 3001)
// Redirect /docs to /docs/ to match Docusaurus baseUrl
if (pathname === '/docs') {
res.writeHead(302, { 'Location': `/docs/${url.search}` });
res.end();
return;
}
// Proxy /docs/* requests to Docusaurus
if (pathname.startsWith('/docs/')) {
const docsPort = 3001;
// Preserve the /docs prefix when forwarding to Docusaurus
const docsUrl = `http://localhost:${docsPort}${pathname}${url.search}`;
console.log(`[Docs Proxy] Proxying ${pathname} -> ${docsUrl}`);
try {
// Convert headers to plain object for fetch
const proxyHeaders: Record<string, string> = {};
for (const [key, value] of Object.entries(req.headers)) {
if (typeof value === 'string') {
proxyHeaders[key] = value;
} else if (Array.isArray(value)) {
proxyHeaders[key] = value.join(', ');
}
}
proxyHeaders['host'] = `localhost:${docsPort}`;
const docsResponse = await fetch(docsUrl, {
method: req.method,
headers: proxyHeaders,
body: req.method !== 'GET' && req.method !== 'HEAD' ? await readRequestBody(req) : undefined,
});
const contentType = docsResponse.headers.get('content-type') || 'text/html';
const body = await docsResponse.text();
// Forward response headers
const responseHeaders: Record<string, string> = {
'Content-Type': contentType,
'Cache-Control': 'no-cache',
};
// Forward Set-Cookie headers if present
const setCookieHeaders = docsResponse.headers.get('set-cookie');
if (setCookieHeaders) {
responseHeaders['Set-Cookie'] = setCookieHeaders;
}
console.log(`[Docs Proxy] Response ${docsResponse.status}: ${contentType}`);
res.writeHead(docsResponse.status, responseHeaders);
res.end(body);
return;
} catch (err) {
console.error(`[Docs Proxy] Failed to proxy to ${docsUrl}:`, err);
console.error(`[Docs Proxy] Error details:`, (err as Error).message);
res.writeHead(502, { 'Content-Type': 'text/plain' });
res.end(`Bad Gateway: Docs site not available at ${docsUrl}\nMake sure the Docusaurus server is running on port ${docsPort}.\nError: ${(err as Error).message}`);
return;
}
}
// Root path - serve JS frontend HTML (default or both mode)
if (pathname === '/' || pathname === '/index.html') {
const html = generateServerDashboard(initialPath);

View File

@@ -0,0 +1,212 @@
/**
* Config Backup Service
* Handles backup of local configuration files (.claude, .codex, .gemini, .qwen)
*/
import { mkdir, readdir, stat, copyFile, rm, access } from 'fs/promises';
import { join } from 'path';
import { homedir } from 'os';
export interface BackupOptions {
  /** Configuration directories to backup (default: ['.claude']) */
  configDirs?: string[];
  /** Custom backup name (default: auto-generated timestamp) */
  backupName?: string;
}

export interface BackupResult {
  /** Whether the backup operation completed without error */
  success: boolean;
  /** Absolute path of the created backup directory (on success) */
  backupPath?: string;
  /** Number of files copied into the backup */
  fileCount: number;
  /** Error message (on failure) */
  error?: string;
}

export interface BackupInfo {
  /** Backup directory name */
  name: string;
  /** Absolute path of the backup directory */
  path: string;
  /** Creation time (directory mtime) */
  createdAt: Date;
  /** Number of files inside the backup */
  fileCount: number;
}

/**
 * Config Backup Service
 *
 * Creates, lists, restores, and deletes snapshots of configuration
 * directories under ~/.ccw. Backups are stored as plain directory copies
 * in ~/.ccw/backups/<name>.
 */
export class ConfigBackupService {
  /**
   * Characters permitted in a backup name. Excludes path separators so a
   * name can never address anything outside the backups directory.
   */
  private static readonly SAFE_NAME_PATTERN = /^[A-Za-z0-9._-]+$/;

  private ccwDir: string;
  private backupDir: string;

  constructor() {
    this.ccwDir = join(homedir(), '.ccw');
    this.backupDir = join(this.ccwDir, 'backups');
  }

  /**
   * Reject backup names that could escape the backups directory.
   *
   * Defense-in-depth: the HTTP routes validate names too, but the service
   * must not depend on every caller doing so — '' and '.' resolve to the
   * backups directory itself and '..' to its parent, which deleteBackup
   * would then remove recursively.
   *
   * @param name - Candidate backup name
   * @throws Error when the name is empty, '.', '..', or contains characters
   *         outside [A-Za-z0-9._-]
   */
  private assertSafeBackupName(name: string): void {
    if (!name || name === '.' || name === '..' || !ConfigBackupService.SAFE_NAME_PATTERN.test(name)) {
      throw new Error(`Invalid backup name: ${JSON.stringify(name)}`);
    }
  }

  /**
   * Create a backup of configuration directories
   * @param options - Backup options
   * @returns Backup result with path and file count
   */
  async createBackup(options: BackupOptions = {}): Promise<BackupResult> {
    const { configDirs = ['.claude'], backupName } = options;
    try {
      // SECURITY: validate a caller-supplied name before using it in a path
      // (done before any filesystem writes so invalid input has no effect).
      if (backupName) {
        this.assertSafeBackupName(backupName);
      }
      // Create backup directory
      await mkdir(this.backupDir, { recursive: true });
      // Generate backup name, e.g. "backup-2026-02-05T17-32-31"
      const timestamp = new Date().toISOString().replace(/[:.]/g, '-').substring(0, 19);
      const backupNameFinal = backupName || `backup-${timestamp}`;
      const backupPath = join(this.backupDir, backupNameFinal);
      await mkdir(backupPath, { recursive: true });
      let fileCount = 0;
      // Backup each config directory
      for (const configDir of configDirs) {
        const sourcePath = join(this.ccwDir, configDir);
        const targetPath = join(backupPath, configDir);
        // Skip directories that don't exist locally
        try {
          await access(sourcePath);
        } catch {
          continue;
        }
        // Copy directory recursively, then count what was copied
        await this.copyDirectory(sourcePath, targetPath);
        fileCount += await this.countFiles(targetPath);
      }
      return {
        success: true,
        backupPath,
        fileCount
      };
    } catch (error: unknown) {
      return {
        success: false,
        error: (error as Error).message,
        fileCount: 0
      };
    }
  }

  /**
   * List all available backups
   * @returns Array of backup information sorted by creation date (newest first)
   */
  async listBackups(): Promise<BackupInfo[]> {
    try {
      const entries = await readdir(this.backupDir, { withFileTypes: true });
      const backups: BackupInfo[] = [];
      for (const entry of entries) {
        if (entry.isDirectory()) {
          const backupPath = join(this.backupDir, entry.name);
          const stats = await stat(backupPath);
          const fileCount = await this.countFiles(backupPath);
          backups.push({
            name: entry.name,
            path: backupPath,
            createdAt: stats.mtime,
            fileCount
          });
        }
      }
      return backups.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
    } catch {
      // Backups directory missing or unreadable — treat as "no backups".
      return [];
    }
  }

  /**
   * Delete a specific backup
   * @param backupName - Name of the backup to delete
   * @returns Success status (deleting a nonexistent backup succeeds; force:true)
   */
  async deleteBackup(backupName: string): Promise<{ success: boolean; error?: string }> {
    try {
      // SECURITY: never pass an unvalidated name to rm(recursive) — '' or
      // '..' would otherwise delete the backups dir or its parent.
      this.assertSafeBackupName(backupName);
      const backupPath = join(this.backupDir, backupName);
      await rm(backupPath, { recursive: true, force: true });
      return { success: true };
    } catch (error: unknown) {
      return {
        success: false,
        error: (error as Error).message
      };
    }
  }

  /**
   * Restore a backup to the original location
   * @param backupName - Name of the backup to restore
   * @returns Success status (fails if the backup does not exist)
   */
  async restoreBackup(backupName: string): Promise<{ success: boolean; error?: string }> {
    try {
      // SECURITY: same defense-in-depth validation as deleteBackup.
      this.assertSafeBackupName(backupName);
      const backupPath = join(this.backupDir, backupName);
      const entries = await readdir(backupPath, { withFileTypes: true });
      for (const entry of entries) {
        if (entry.isDirectory()) {
          const sourcePath = join(backupPath, entry.name);
          const targetPath = join(this.ccwDir, entry.name);
          // Copy directory back to original location (overwrites file-by-file)
          await this.copyDirectory(sourcePath, targetPath);
        }
      }
      return { success: true };
    } catch (error: unknown) {
      return {
        success: false,
        error: (error as Error).message
      };
    }
  }

  /**
   * Copy directory recursively
   * @param src - Source directory path
   * @param dest - Destination directory path
   */
  private async copyDirectory(src: string, dest: string): Promise<void> {
    await mkdir(dest, { recursive: true });
    const entries = await readdir(src, { withFileTypes: true });
    for (const entry of entries) {
      const srcPath = join(src, entry.name);
      const destPath = join(dest, entry.name);
      if (entry.isDirectory()) {
        await this.copyDirectory(srcPath, destPath);
      } else {
        await copyFile(srcPath, destPath);
      }
    }
  }

  /**
   * Count files in a directory recursively
   * @param dir - Directory path
   * @returns Number of files (directories themselves are not counted)
   */
  private async countFiles(dir: string): Promise<number> {
    let count = 0;
    const entries = await readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      if (entry.isDirectory()) {
        count += await this.countFiles(join(dir, entry.name));
      } else {
        count++;
      }
    }
    return count;
  }
}

View File

@@ -0,0 +1,330 @@
/**
* Config Sync Service
* Downloads configuration files from GitHub using remote-first conflict resolution
*/
import { promises as fs } from 'fs';
import { join } from 'path';
import { homedir } from 'os';
import {
validateConfigDirs,
validateGitHubParams,
VALID_CONFIG_DIRS,
type ValidConfigDir
} from '../../utils/security-validation.js';
/**
 * Default GitHub repository configuration for remote config sync
 */
const DEFAULT_GITHUB_CONFIG = {
  owner: 'dyw0830',
  repo: 'ccw',
  branch: 'main',
};

/**
 * Default config directories to sync
 * Uses whitelist from security-validation
 */
const DEFAULT_CONFIG_DIRS: ValidConfigDir[] = ['.claude'];

/**
 * Common configuration files to sync from each config directory.
 * Acts as the probe list: sync and remote listing only ever request these
 * exact filenames, so arbitrary remote paths are never fetched.
 */
const COMMON_CONFIG_FILES = [
  'settings.json',
  'config.json',
  'CLAUDE.md',
  'cli-tools.json',
  'guidelines.json',
  'prompts.json',
];

/**
 * Sync result interface
 */
export interface ConfigSyncResult {
  /** False when any file or directory failed to sync */
  success: boolean;
  /** Local paths of files written from remote content */
  syncedFiles: string[];
  /** Per-file / per-directory error messages */
  errors: string[];
  /** Local paths left untouched (file exists and overwrite=false) */
  skippedFiles: string[];
}

/**
 * Config sync options interface
 */
export interface ConfigSyncOptions {
  /** GitHub repository owner (default: 'dyw0830') */
  owner?: string;
  /** GitHub repository name (default: 'ccw') */
  repo?: string;
  /** Git branch (default: 'main') */
  branch?: string;
  /** Config directories to sync (default: ['.claude']) */
  configDirs?: string[];
  /** Target base directory (default: ~/.ccw) */
  baseDir?: string;
  /** Remote-first: overwrite local files (default: true) */
  overwrite?: boolean;
}
/**
 * Config Sync Service
 * Downloads configuration files from GitHub with remote-first conflict resolution
 * (remote content wins unless overwrite is explicitly disabled).
 */
export class ConfigSyncService {
  /**
   * Sync configuration files from GitHub
   * @param options - Sync options
   * @returns Sync result with status, files synced, and any errors
   */
  async syncConfig(options: ConfigSyncOptions = {}): Promise<ConfigSyncResult> {
    const {
      owner = DEFAULT_GITHUB_CONFIG.owner,
      repo = DEFAULT_GITHUB_CONFIG.repo,
      branch = DEFAULT_GITHUB_CONFIG.branch,
      configDirs = DEFAULT_CONFIG_DIRS,
      baseDir = join(homedir(), '.ccw'),
      overwrite = true,
    } = options;

    // SECURITY: Validate all inputs before processing. Failures are reported
    // in the result object (not thrown) so HTTP callers get structured errors.
    try {
      // Validate GitHub parameters (SSRF protection)
      validateGitHubParams({ owner, repo, branch });
      // Validate config directories (path traversal protection)
      validateConfigDirs(configDirs);
    } catch (error) {
      return {
        success: false,
        syncedFiles: [],
        errors: [error instanceof Error ? error.message : String(error)],
        skippedFiles: [],
      };
    }

    const results: ConfigSyncResult = {
      success: true,
      syncedFiles: [],
      errors: [],
      skippedFiles: [],
    };

    // A failure in one directory is recorded but does not stop the others.
    for (const configDir of configDirs) {
      try {
        const dirResult = await this.syncConfigDirectory(configDir, {
          owner,
          repo,
          branch,
          baseDir,
          overwrite,
        });
        results.syncedFiles.push(...dirResult.syncedFiles);
        results.errors.push(...dirResult.errors);
        results.skippedFiles.push(...dirResult.skippedFiles);
        if (!dirResult.success) {
          results.success = false;
        }
      } catch (error: unknown) {
        const message = error instanceof Error ? error.message : String(error);
        results.errors.push(`${configDir}: ${message}`);
        results.success = false;
      }
    }
    return results;
  }

  /**
   * Sync a single config directory: download each known config file from
   * raw.githubusercontent.com and write it locally (remote-first).
   */
  private async syncConfigDirectory(
    configDir: string,
    options: {
      owner: string;
      repo: string;
      branch: string;
      baseDir: string;
      overwrite: boolean;
    }
  ): Promise<ConfigSyncResult> {
    const { owner, repo, branch, baseDir, overwrite } = options;
    const result: ConfigSyncResult = {
      success: true,
      syncedFiles: [],
      errors: [],
      skippedFiles: [],
    };
    const localPath = join(baseDir, configDir);
    const baseUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branch}/${configDir}`;

    // Ensure local directory exists
    await fs.mkdir(localPath, { recursive: true });

    // Try to sync each common config file
    for (const file of COMMON_CONFIG_FILES) {
      const fileUrl = `${baseUrl}/${file}`;
      const localFilePath = join(localPath, file);
      try {
        // Check if remote file exists
        const response = await fetch(fileUrl);
        if (!response.ok) {
          // File doesn't exist on remote, skip
          continue;
        }
        const content = await response.text();
        // Respect overwrite=false: keep an existing local file untouched
        const localExists = await this.fileExists(localFilePath);
        if (localExists && !overwrite) {
          result.skippedFiles.push(localFilePath);
          continue;
        }
        // Write remote content to local file (remote-first)
        await fs.writeFile(localFilePath, content, 'utf-8');
        result.syncedFiles.push(localFilePath);
      } catch (error: unknown) {
        const message = error instanceof Error ? error.message : String(error);
        result.errors.push(`${file}: ${message}`);
        result.success = false;
      }
    }
    return result;
  }

  /**
   * Check if a file exists
   */
  private async fileExists(filePath: string): Promise<boolean> {
    try {
      await fs.access(filePath);
      return true;
    } catch {
      return false;
    }
  }

  /**
   * List available config files from remote directory.
   * Probes all candidate files in parallel; the returned order matches
   * COMMON_CONFIG_FILES.
   * @param configDir - Config directory name
   * @param options - GitHub options
   * @returns List of available files
   */
  async listRemoteFiles(
    configDir: string,
    options: Partial<Pick<ConfigSyncOptions, 'owner' | 'repo' | 'branch'>> = {}
  ): Promise<string[]> {
    const {
      owner = DEFAULT_GITHUB_CONFIG.owner,
      repo = DEFAULT_GITHUB_CONFIG.repo,
      branch = DEFAULT_GITHUB_CONFIG.branch,
    } = options;
    const baseUrl = `https://raw.githubusercontent.com/${owner}/${repo}/${branch}/${configDir}`;
    // Probe in parallel; Promise.all preserves input order.
    const probes = await Promise.all(
      COMMON_CONFIG_FILES.map(async (file) => {
        try {
          const response = await fetch(`${baseUrl}/${file}`);
          return response.ok ? file : null;
        } catch {
          // File doesn't exist or network error — treat as unavailable
          return null;
        }
      })
    );
    return probes.filter((file): file is string => file !== null);
  }

  /**
   * Get sync status - compare local and remote files
   * @param options - Sync options
   * @returns Status comparison result
   * @throws Error when GitHub params or config dirs fail validation
   */
  async getSyncStatus(options: ConfigSyncOptions = {}): Promise<{
    localOnly: string[];
    remoteOnly: string[];
    synced: string[];
  }> {
    const {
      owner = DEFAULT_GITHUB_CONFIG.owner,
      repo = DEFAULT_GITHUB_CONFIG.repo,
      branch = DEFAULT_GITHUB_CONFIG.branch,
      configDirs = DEFAULT_CONFIG_DIRS,
      baseDir = join(homedir(), '.ccw'),
    } = options;

    // SECURITY: validators throw on invalid input; let the error propagate
    // to the caller (the HTTP layer turns it into a 400 response).
    validateGitHubParams({ owner, repo, branch });
    validateConfigDirs(configDirs);

    const status = {
      localOnly: [] as string[],
      remoteOnly: [] as string[],
      synced: [] as string[],
    };

    for (const configDir of configDirs) {
      const localPath = join(baseDir, configDir);
      const remoteFiles = await this.listRemoteFiles(configDir, { owner, repo, branch });
      const localFiles = await this.listLocalFiles(localPath);
      // "synced" here means present on both sides (no content comparison).
      for (const file of remoteFiles) {
        const localFilePath = join(localPath, file);
        if (localFiles.includes(file)) {
          status.synced.push(localFilePath);
        } else {
          status.remoteOnly.push(localFilePath);
        }
      }
      for (const file of localFiles) {
        if (!remoteFiles.includes(file)) {
          status.localOnly.push(join(localPath, file));
        }
      }
    }
    return status;
  }

  /**
   * List files in a local directory, restricted to the known config files.
   */
  private async listLocalFiles(dirPath: string): Promise<string[]> {
    try {
      const files = await fs.readdir(dirPath);
      return files.filter(file => COMMON_CONFIG_FILES.includes(file));
    } catch {
      // Directory missing or unreadable — treat as empty.
      return [];
    }
  }
}
/**
 * Lazily-created shared ConfigSyncService instance.
 */
let sharedConfigSyncService: ConfigSyncService | null = null;

/**
 * Get the singleton ConfigSyncService, creating it on first use.
 */
export function getConfigSyncService(): ConfigSyncService {
  sharedConfigSyncService ??= new ConfigSyncService();
  return sharedConfigSyncService;
}

View File

@@ -0,0 +1,174 @@
/**
* Version Checker Service
* Checks application version against GitHub latest release
* Uses caching to avoid excessive API calls
*/
import { readFile } from 'fs/promises';
import { join } from 'path';
import { existsSync } from 'fs';
/**
* Version check result
*/
export interface VersionCheckResult {
currentVersion: string;
latestVersion: string;
updateAvailable: boolean;
changelog?: string;
}
/**
* Version cache entry
*/
interface CacheEntry {
data: VersionCheckResult;
timestamp: number;
}
/**
* Version Checker Service
* Checks for updates by comparing local version with GitHub releases
*/
export class VersionChecker {
private cache: CacheEntry | null = null;
private readonly CACHE_TTL = 5 * 60 * 1000; // 5 minutes
private readonly GITHUB_OWNER = 'dyw0830';
private readonly GITHUB_REPO = 'ccw';
private readonly GITHUB_API_URL = `https://api.github.com/repos/dyw0830/ccw/releases/latest`;
/**
* Check for updates
* Returns cached result if within TTL
*/
async checkVersion(): Promise<VersionCheckResult> {
// Check cache first
if (this.cache && Date.now() - this.cache.timestamp < this.CACHE_TTL) {
return this.cache.data;
}
// Get versions
const currentVersion = await this.getLocalVersion();
const latestVersion = await this.getLatestVersionFromGitHub();
const result: VersionCheckResult = {
currentVersion,
latestVersion,
updateAvailable: this.compareVersions(currentVersion, latestVersion) < 0
};
// Cache result
this.cache = { data: result, timestamp: Date.now() };
return result;
}
/**
* Get local version from package.json
* Searches in monorepo root and ccw package directories
*/
private async getLocalVersion(): Promise<string> {
// Try to find package.json with actual CCW version
const possiblePaths = [
join(process.cwd(), 'package.json'), // Current directory
join(process.cwd(), 'ccw', 'package.json'), // ccw subdirectory
join(__dirname, '..', '..', '..', '..', 'package.json'), // From src/core/services -> monorepo root
];
for (const pkgPath of possiblePaths) {
if (existsSync(pkgPath)) {
try {
const content = await readFile(pkgPath, 'utf-8');
const pkg = JSON.parse(content);
if (pkg.version && typeof pkg.version === 'string') {
return pkg.version;
}
} catch {
// Continue to next path
}
}
}
// Fallback to a default version if no package.json found
return '0.0.0';
}
/**
 * Fetch latest version from GitHub Releases API.
 *
 * The request is aborted after 10 seconds. On failure, returns the
 * cached version (even if expired) when one exists; otherwise rethrows.
 *
 * Fix: the abort timer is now cleared in a `finally` block — previously
 * it was cleared only on the success path, so any failed request left a
 * live 10s timer behind (which also aborted an already-settled request).
 *
 * @returns Latest release version with any leading 'v' stripped
 * @throws Error on timeout, or on API failure when no cache exists
 */
private async getLatestVersionFromGitHub(): Promise<string> {
  // Create abort controller for timeout
  const controller = new AbortController();
  const timeoutId = setTimeout(() => controller.abort(), 10000); // 10 second timeout
  try {
    const response = await fetch(this.GITHUB_API_URL, {
      headers: {
        'Accept': 'application/vnd.github.v3+json',
        'User-Agent': 'CCW-VersionChecker', // REQUIRED by GitHub API
      },
      signal: controller.signal,
    });
    if (!response.ok) {
      throw new Error(`GitHub API error: ${response.status} ${response.statusText}`);
    }
    // Validate response structure
    const data = await response.json() as { tag_name?: string };
    if (!data || typeof data !== 'object') {
      throw new Error('Invalid GitHub API response format');
    }
    if (!data.tag_name || typeof data.tag_name !== 'string') {
      throw new Error('Invalid tag_name in GitHub response');
    }
    // Extract version from tag_name (remove 'v' prefix if present)
    const tagName = data.tag_name;
    return tagName.startsWith('v') ? tagName.substring(1) : tagName;
  } catch (error) {
    // Handle abort (timeout)
    if (error instanceof Error && error.name === 'AbortError') {
      throw new Error('GitHub API request timeout (10s)');
    }
    // Return cached data if available, even if expired
    if (this.cache) {
      console.warn(`[VersionChecker] Using cached version due to error: ${(error as Error).message}`);
      return this.cache.data.latestVersion;
    }
    throw error;
  } finally {
    // Always release the timer — success and every error path.
    clearTimeout(timeoutId);
  }
}
/**
 * Compare two semantic version strings (major.minor.patch).
 * Non-numeric or missing components are treated as 0.
 *
 * @returns -1 if v1 < v2, 0 if v1 == v2, 1 if v1 > v2
 */
private compareVersions(v1: string, v2: string): number {
  // Normalize a version string into exactly three numeric components.
  const toParts = (version: string): number[] => {
    const nums = version.split('.').map((piece) => parseInt(piece, 10) || 0);
    while (nums.length < 3) {
      nums.push(0);
    }
    return nums;
  };
  const left = toParts(v1);
  const right = toParts(v2);
  for (let i = 0; i < 3; i++) {
    if (left[i] !== right[i]) {
      return left[i] < right[i] ? -1 : 1;
    }
  }
  return 0;
}
/**
 * Drop any cached version-check result so the next checkVersion()
 * call hits the network again (manual refresh, tests).
 */
clearCache(): void {
  this.cache = null;
}
}

View File

@@ -266,6 +266,21 @@ export class JsonLinesParser implements IOutputParser {
return units;
}
/**
* Debug logging helper for CLI output parsing
* Enable with DEBUG_CLI_OUTPUT=true environment variable
*/
private debugLog(event: string, data: Record<string, unknown>): void {
if (process.env.DEBUG_CLI_OUTPUT) {
const logEntry = {
ts: new Date().toISOString(),
event,
...data
};
console.error(`[CLI_OUTPUT_DEBUG] ${JSON.stringify(logEntry)}`);
}
}
/**
* Map parsed JSON object to appropriate IR type
* Handles various JSON event formats from different CLI tools:
@@ -275,6 +290,7 @@ export class JsonLinesParser implements IOutputParser {
* - OpenCode CLI: --format json (step_start, text, step_finish)
*/
private mapJsonToIR(json: any, fallbackStreamType: 'stdout' | 'stderr'): CliOutputUnit | null {
this.debugLog('mapJsonToIR_input', { type: json.type, role: json.role, keys: Object.keys(json) });
// Handle numeric timestamp (milliseconds) from OpenCode
const timestamp = typeof json.timestamp === 'number'
? new Date(json.timestamp).toISOString()
@@ -772,6 +788,14 @@ export class JsonLinesParser implements IOutputParser {
// Default: treat as stdout/stderr based on fallback
if (json.content || json.message || json.text) {
this.debugLog('mapJsonToIR_fallback_stdout', {
type: json.type,
fallbackType: fallbackStreamType,
hasContent: !!json.content,
hasMessage: !!json.message,
hasText: !!json.text,
contentPreview: (json.content || json.message || json.text || '').substring(0, 100)
});
return {
type: fallbackStreamType,
content: json.content || json.message || json.text,
@@ -780,6 +804,7 @@ export class JsonLinesParser implements IOutputParser {
}
// Unrecognized structure, return as metadata
this.debugLog('mapJsonToIR_fallback_metadata', { type: json.type, keys: Object.keys(json) });
return {
type: 'metadata',
content: json,
@@ -1171,6 +1196,41 @@ export function createOutputParser(format: 'text' | 'json-lines'): IOutputParser
// ========== Utility Functions ==========
/**
 * Find the start index of the last streaming_content group.
 * Groups are runs of consecutive streaming_content units separated by
 * non-streaming events (tool_call, metadata, etc.); this helps filter
 * out intermediate assistant messages in multi-turn executions.
 *
 * @param units - All output units
 * @returns Index where the last streaming_content group starts
 *          (0 when no streaming_content unit exists)
 */
function findLastStreamingGroup(units: CliOutputUnit[]): number {
  // Locate the last streaming_content unit, scanning from the end.
  let last = units.length - 1;
  while (last >= 0 && units[last].type !== 'streaming_content') {
    last--;
  }
  if (last < 0) {
    // No streaming content at all.
    return 0;
  }
  // Walk backwards over the contiguous streaming run to its first unit.
  let start = last;
  while (start > 0 && units[start - 1].type === 'streaming_content') {
    start--;
  }
  return start;
}
/**
* Flatten output units into plain text string
* Useful for Resume scenario where we need concatenated context
@@ -1197,12 +1257,23 @@ export function flattenOutputUnits(
stripCommandJsonBlocks = false
} = options || {};
// Debug logging for output unit analysis
if (process.env.DEBUG_CLI_OUTPUT) {
const typeCounts: Record<string, number> = {};
for (const u of units) {
typeCounts[u.type] = (typeCounts[u.type] || 0) + 1;
}
console.error(`[CLI_OUTPUT_DEBUG] flattenOutputUnits_input: ${JSON.stringify({ unitCount: units.length, typeCounts, includeTypes, excludeTypes })}`);
}
// Special handling for streaming_content: concatenate all into a single agent_message unit
// Gemini delta messages are incremental (each contains partial content to append)
let processedUnits = units;
const streamingUnits = units.filter(u => u.type === 'streaming_content');
const agentMessages = units.filter(u => u.type === 'agent_message');
if (streamingUnits.length > 0) {
const hasAgentMessage = units.some(u => u.type === 'agent_message');
const hasAgentMessage = agentMessages.length > 0;
// If a non-delta final agent_message already exists, prefer it and simply drop streaming_content.
// This avoids duplicated final output when providers emit BOTH streaming deltas and a final message frame.
@@ -1210,18 +1281,38 @@ export function flattenOutputUnits(
// If no agent_message exists, synthesize one from streaming_content (delta-only streams).
if (!hasAgentMessage) {
const concatenatedContent = streamingUnits
// For multi-turn executions, only keep the LAST group of streaming_content
// (separated by tool_call/tool_result/metadata events)
// This filters out intermediate planning/status messages
const lastGroupStartIndex = findLastStreamingGroup(units);
const lastGroupStreamingUnits = streamingUnits.filter((_, idx) => {
const unitIndex = units.indexOf(streamingUnits[idx]);
return unitIndex >= lastGroupStartIndex;
});
const concatenatedContent = lastGroupStreamingUnits
.map(u => typeof u.content === 'string' ? u.content : '')
.join('');
processedUnits.push({
type: 'agent_message',
content: concatenatedContent,
timestamp: streamingUnits[streamingUnits.length - 1].timestamp
});
if (concatenatedContent) {
processedUnits.push({
type: 'agent_message',
content: concatenatedContent,
timestamp: lastGroupStreamingUnits[lastGroupStreamingUnits.length - 1].timestamp
});
}
}
}
// For multi-turn executions with multiple agent_message units (Codex/Claude),
// only keep the LAST agent_message (final result)
if (agentMessages.length > 1) {
const lastAgentMessage = agentMessages[agentMessages.length - 1];
processedUnits = processedUnits.filter(u =>
u.type !== 'agent_message' || u === lastAgentMessage
);
}
// Filter units by type
let filtered = processedUnits;
if (includeTypes && includeTypes.length > 0) {
@@ -1231,6 +1322,15 @@ export function flattenOutputUnits(
filtered = filtered.filter(u => !excludeTypes.includes(u.type));
}
// Debug logging for filtered output
if (process.env.DEBUG_CLI_OUTPUT) {
const filteredTypeCounts: Record<string, number> = {};
for (const u of filtered) {
filteredTypeCounts[u.type] = (filteredTypeCounts[u.type] || 0) + 1;
}
console.error(`[CLI_OUTPUT_DEBUG] flattenOutputUnits_filtered: ${JSON.stringify({ filteredCount: filtered.length, filteredTypeCounts })}`);
}
// Convert to text
const lines = filtered.map(unit => {
let text = '';

View File

@@ -0,0 +1,231 @@
import { spawn, type ChildProcess } from 'child_process';
import { join, resolve } from 'path';
import { fileURLToPath } from 'url';
import { dirname } from 'path';
import chalk from 'chalk';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// Singleton handle to the spawned Docusaurus dev-server process (null when stopped).
let docsProcess: ChildProcess | null = null;
// Port the current docs process was started on (null when stopped).
let docsPort: number | null = null;
// Default Docusaurus port
const DEFAULT_DOCS_PORT = 3001;
/**
 * Start Docusaurus documentation development server.
 *
 * Locates the docs-site directory, validates that its package.json has
 * a "start" script, spawns `npm start`, and waits for a ready signal in
 * the process output. Resolves without starting anything (after logging
 * a warning) when the docs-site setup cannot be found or validated.
 *
 * Fixes: removed a platform ternary whose branches were identical;
 * added a settled-once guard so the 'exit'/'error' handlers cannot
 * reject a promise that already resolved; the dynamic fs import is
 * loaded once instead of per loop iteration.
 *
 * @param port - Port to run Docusaurus server on (default: 3001)
 * @returns Promise that resolves when the server is ready
 * @throws Error on spawn failure, non-zero early exit, or 60s timeout
 */
export async function startDocsSite(port: number = DEFAULT_DOCS_PORT): Promise<void> {
  // Check if already running
  if (docsProcess && docsPort === port) {
    console.log(chalk.yellow(` Docs site already running on port ${port}`));
    return;
  }
  // Load fs helpers once up front.
  const { existsSync, readFileSync } = await import('fs');
  // Try to find docs-site directory (relative to ccw package)
  const possiblePaths = [
    join(__dirname, '../../docs-site'), // From dist/utils
    join(__dirname, '../docs-site'), // From src/utils (dev)
    join(process.cwd(), 'docs-site'), // Current working directory
  ];
  let docsDir: string | null = null;
  for (const path of possiblePaths) {
    const resolvedPath = resolve(path);
    if (existsSync(resolvedPath)) {
      docsDir = resolvedPath;
      break;
    }
  }
  if (!docsDir) {
    console.log(chalk.yellow(` Docs site directory not found. Skipping docs server startup.`));
    console.log(chalk.gray(` The /docs endpoint will not be available.`));
    return;
  }
  console.log(chalk.cyan(` Starting Docusaurus docs site on port ${port}...`));
  console.log(chalk.gray(` Docs dir: ${docsDir}`));
  // Check if package.json exists and has start script
  const packageJsonPath = join(docsDir, 'package.json');
  try {
    if (!existsSync(packageJsonPath)) {
      throw new Error('package.json not found in docs-site directory');
    }
    const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8'));
    if (!packageJson.scripts?.start) {
      throw new Error('No "start" script found in package.json');
    }
  } catch (error) {
    console.log(chalk.yellow(` Failed to validate docs-site setup: ${error}`));
    console.log(chalk.gray(` Skipping docs server startup.`));
    return;
  }
  // Spawn Docusaurus dev server via `npm start`.
  // With shell: true this single command works on Windows and POSIX
  // alike (the previous platform ternary produced the same string on
  // both branches and has been removed).
  docsProcess = spawn('npm start', [], {
    cwd: docsDir,
    stdio: 'pipe',
    shell: true,
    env: {
      ...process.env,
      // Set PORT via environment variable (Docusaurus respects this)
      PORT: port.toString(),
      HOST: 'localhost',
      // Docusaurus uses COLUMNS for terminal width
      COLUMNS: '80',
    }
  });
  docsPort = port;
  // Wait for server to be ready
  return new Promise((resolve, reject) => {
    let output = '';
    let errorOutput = '';
    // Guard so the promise settles exactly once: without it, a late
    // 'exit' event could call reject() after a successful resolve().
    let settled = false;
    const timeout = setTimeout(() => {
      docsProcess?.kill();
      fail(new Error(
        `Docs site startup timeout (60s).\n` +
        `Output: ${output}\n` +
        `Errors: ${errorOutput}`
      ));
    }, 60000); // Docusaurus can take longer to start
    const cleanup = () => {
      clearTimeout(timeout);
      docsProcess?.stdout?.removeAllListeners();
      docsProcess?.stderr?.removeAllListeners();
    };
    const succeed = () => {
      if (settled) return;
      settled = true;
      cleanup();
      console.log(chalk.green(` Docs site ready at http://localhost:${port}/docs/`));
      resolve();
    };
    const fail = (err: Error) => {
      if (settled) return;
      settled = true;
      cleanup();
      reject(err);
    };
    docsProcess?.stdout?.on('data', (data: Buffer) => {
      const chunk = data.toString();
      output += chunk;
      // Log all Docusaurus output for debugging
      console.log(chalk.gray(` Docs: ${chunk.trim()}`));
      // Check for ready signals (Docusaurus output format)
      if (
        chunk.includes('Compiled successfully') ||
        chunk.includes('Compiled with warnings') ||
        chunk.includes('The server is running at') ||
        chunk.includes(`http://localhost:${port}`) ||
        (chunk.includes('Docusaurus') && (chunk.includes('started') || chunk.includes('ready'))) ||
        chunk.includes('➜') || // Docusaurus uses this in CLI output
        chunk.includes('Local:')
      ) {
        succeed();
      }
    });
    docsProcess?.stderr?.on('data', (data: Buffer) => {
      const chunk = data.toString();
      errorOutput += chunk;
      // Log warnings but don't fail
      if (chunk.toLowerCase().includes('warn') || chunk.toLowerCase().includes('warning')) {
        console.log(chalk.yellow(` Docs: ${chunk.trim()}`));
      }
    });
    docsProcess?.on('error', (err: Error) => {
      fail(err);
    });
    docsProcess?.on('exit', (code: number | null) => {
      if (code !== 0 && code !== null) {
        fail(new Error(`Docs process exited with code ${code}. Errors: ${errorOutput}`));
      }
    });
  });
}
/**
 * Stop Docusaurus documentation development server.
 *
 * Sends SIGTERM, waits up to 5 seconds for the process to exit, then
 * force-kills it (taskkill /T on Windows to take down the whole shell
 * process tree, SIGKILL elsewhere).
 *
 * Fix: process exit is now tracked via exitCode/signalCode and the
 * 'exit' event. ChildProcess.killed only records that a signal was
 * successfully *sent* — it is true immediately after kill('SIGTERM')
 * — so the previous `!docsProcess.killed` check made the force-kill
 * branch unreachable.
 */
export async function stopDocsSite(): Promise<void> {
  if (!docsProcess) {
    return;
  }
  console.log(chalk.yellow(' Stopping docs site...'));
  // True once the child has actually terminated (exited or signaled).
  let exited = docsProcess.exitCode !== null || docsProcess.signalCode !== null;
  // Try graceful shutdown first
  docsProcess.kill('SIGTERM');
  if (!exited) {
    // Wait up to 5 seconds for graceful shutdown
    await new Promise<void>((resolve) => {
      const timeout = setTimeout(() => {
        resolve();
      }, 5000);
      docsProcess?.once('exit', () => {
        exited = true;
        clearTimeout(timeout);
        resolve();
      });
    });
  }
  // Force kill if still running
  if (docsProcess && !exited) {
    if (process.platform === 'win32') {
      // With shell: true the child is a wrapper shell, so kill the
      // entire process tree with taskkill.
      try {
        const { exec } = await import('child_process');
        const pid = docsProcess.pid;
        if (pid) {
          await new Promise<void>((resolve) => {
            exec(`taskkill /F /T /PID ${pid}`, (err) => {
              if (err) {
                // Fallback to SIGKILL if taskkill fails
                docsProcess?.kill('SIGKILL');
              }
              resolve();
            });
          });
        }
      } catch {
        // Fallback to SIGKILL
        docsProcess.kill('SIGKILL');
      }
    } else {
      docsProcess.kill('SIGKILL');
    }
    // Wait a bit more for force kill to complete
    await new Promise(resolve => setTimeout(resolve, 500));
  }
  docsProcess = null;
  docsPort = null;
}
/**
 * Get docs site status.
 *
 * A process counts as running only while it has neither exited nor been
 * terminated by a signal. Fix: the previous `!docsProcess.killed` check
 * misreported status both ways — `killed` turns true as soon as any
 * signal is sent (even before the process exits) and stays false when
 * the process exits on its own.
 *
 * @returns Object with running status and port
 */
export function getDocsSiteStatus(): { running: boolean; port: number | null } {
  const running = docsProcess !== null
    && docsProcess.exitCode === null
    && docsProcess.signalCode === null;
  return { running, port: docsPort };
}

View File

@@ -0,0 +1,137 @@
/**
* Security utilities for input validation
* Provides validation functions to prevent common security vulnerabilities
*/
/**
 * Valid config directory names (whitelist approach)
 */
export const VALID_CONFIG_DIRS = ['.claude', '.codex', '.gemini', '.qwen'] as const;
/**
 * Valid config directory name type
 */
export type ValidConfigDir = typeof VALID_CONFIG_DIRS[number];
/**
 * Check if a string is a valid config directory name.
 * Uses a whitelist so only known config directories pass.
 *
 * @param name - Candidate directory name (e.g. ".claude")
 * @returns true when name is one of VALID_CONFIG_DIRS
 */
export function isValidConfigDirName(name: string): boolean {
  // Runtime type guard: untyped JS callers may pass non-strings.
  if (typeof name !== 'string') return false;
  // Must start with dot
  if (!name.startsWith('.')) return false;
  // Widen the readonly tuple to string[] for includes() — this checks
  // membership without the previous `as any` escape hatch.
  return (VALID_CONFIG_DIRS as readonly string[]).includes(name);
}
/**
 * Check if a string is a valid backup name.
 * Rejects anything usable for path traversal or filesystem tricks;
 * only 1-100 characters from a safe set are accepted.
 */
export function isValidBackupName(name: string): boolean {
  // Runtime type guard for untyped callers.
  if (typeof name !== 'string') return false;
  // Reject traversal sequences, path separators, and NUL bytes outright.
  const forbidden = ['..', '/', '\\', '\0'];
  if (forbidden.some((token) => name.includes(token))) return false;
  // Enforce length bounds before the character-set check.
  if (name.length === 0 || name.length > 100) return false;
  // Only alphanumerics, hyphen, underscore, and dot are allowed.
  return /^[a-zA-Z0-9._-]+$/.test(name);
}
/**
 * Check if a string is a valid GitHub repository identifier.
 * GitHub repo rules: max 100 chars, alphanumeric, hyphen, underscore,
 * dot; the name cannot start or end with a hyphen.
 */
export function isValidGitHubIdentifier(name: string): boolean {
  // Runtime type guard for untyped callers.
  if (typeof name !== 'string') return false;
  const withinLength = name.length >= 1 && name.length <= 100;
  const hyphenAtEdge = name.startsWith('-') || name.endsWith('-');
  return withinLength && !hyphenAtEdge && /^[a-zA-Z0-9._-]+$/.test(name);
}
/**
 * Check if a string is a valid Git branch name.
 *
 * Enforces the core git check-ref-format rules: no component may be
 * empty (rules out "//" and leading/trailing slashes), begin with a
 * dot, or end with ".lock"; the name may not contain "..", "~", ":",
 * or the reflog marker "@{", and may not end with ".". Characters are
 * restricted to a conservative safe set (alphanumeric, hyphen,
 * underscore, dot, slash).
 */
export function isValidBranchName(name: string): boolean {
  // Runtime type guard for untyped callers.
  if (typeof name !== 'string') return false;
  // Cannot be empty
  if (name.length === 0) return false;
  // Cannot contain .. or ~ or :
  if (name.includes('..') || name.includes('~') || name.includes(':')) return false;
  // "@{" is reserved for reflog syntax
  if (name.includes('@{')) return false;
  // Cannot end with a dot
  if (name.endsWith('.')) return false;
  // Every slash-separated component must be non-empty, must not begin
  // with a dot, and must not end with ".lock"
  const components = name.split('/');
  const badComponent = components.some((part) =>
    part.length === 0 || part.startsWith('.') || part.endsWith('.lock')
  );
  if (badComponent) return false;
  // Only allow safe characters (alphanumeric, hyphen, underscore, dot, slash)
  return /^[a-zA-Z0-9_./-]+$/.test(name);
}
/**
 * Validate an array of config directory names.
 * Throws a descriptive Error on the first entry that is not in the
 * VALID_CONFIG_DIRS whitelist, or when the input is not an array.
 */
export function validateConfigDirs(dirs: string[]): void {
  if (!Array.isArray(dirs)) {
    throw new Error('configDirs must be an array');
  }
  // Locate the first offending entry, if any.
  const invalid = dirs.find((dir) => !isValidConfigDirName(dir));
  if (invalid !== undefined) {
    throw new Error(
      `Invalid config directory: "${invalid}". ` +
      `Valid options are: ${VALID_CONFIG_DIRS.join(', ')}`
    );
  }
}
/**
 * Validate GitHub sync parameters.
 * Each field is checked only when provided; throws a descriptive
 * Error for the first invalid value (owner, then repo, then branch).
 */
export function validateGitHubParams(params: {
  owner?: string;
  repo?: string;
  branch?: string;
}): void {
  // Table of (value, validator, error message) triples, checked in order.
  const checks: Array<[string | undefined, (value: string) => boolean, string]> = [
    [params.owner, isValidGitHubIdentifier, `Invalid GitHub owner identifier: "${params.owner}"`],
    [params.repo, isValidGitHubIdentifier, `Invalid GitHub repository name: "${params.repo}"`],
    [params.branch, isValidBranchName, `Invalid branch name: "${params.branch}"`],
  ];
  for (const [value, isValid, message] of checks) {
    if (value !== undefined && !isValid(value)) {
      throw new Error(message);
    }
  }
}