# State Manager

Centralized state management module for the Code Review workflow. Provides atomic operations, automatic backups, validation, and rollback capabilities.
## Overview

This module solves the fragile state management problem by providing:

- **Atomic updates** - Write to a temp file, then rename (prevents corruption; see the sketch after this list)
- **Automatic backups** - Every update creates a backup first
- **Rollback capability** - Restore from backup on failure
- **Schema validation** - Ensure state structure integrity
- **Change history** - Track all state modifications
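The atomic-update guarantee comes from the classic write-temp-then-rename pattern: the new content is written to a temporary file and only then moved over the original, so a crash mid-write can never leave a half-written `state.json`. A minimal sketch of the pattern in plain Node.js (an illustration only; the module itself expresses the same idea with the `Write`/`Read`/`Bash` tool calls shown in the API reference below):

```javascript
const fs = require('fs');

// Illustration only (not part of this module): write new content to a temp
// file, then atomically replace the original. rename() within one filesystem
// is atomic, so readers see either the old file or the new one, never a
// partial write.
function atomicWriteJson(path, data) {
  const tempPath = `${path}.tmp`;
  fs.writeFileSync(tempPath, JSON.stringify(data, null, 2));
  fs.renameSync(tempPath, path);
}
```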
## File Structure

```
{workDir}/
  state.json            # Current state
  state.backup.json     # Latest backup
  state-history.json    # Change history log
```
## API Reference

### initState(workDir)

Initialize a new state file with default values.
```javascript
/**
 * Initialize state file with default structure
 * @param {string} workDir - Working directory path
 * @returns {object} - Initial state object
 */
function initState(workDir) {
  const now = new Date().toISOString();

  const initialState = {
    status: 'pending',
    started_at: now,
    updated_at: now,
    context: null,
    scan_completed: false,
    scan_summary: null,
    reviewed_dimensions: [],
    current_dimension: null,
    findings: {
      correctness: [],
      readability: [],
      performance: [],
      security: [],
      testing: [],
      architecture: []
    },
    report_generated: false,
    report_path: null,
    current_action: null,
    completed_actions: [],
    errors: [],
    error_count: 0,
    summary: null
  };

  // Write state file
  const statePath = `${workDir}/state.json`;
  Write(statePath, JSON.stringify(initialState, null, 2));

  // Initialize history log
  const historyPath = `${workDir}/state-history.json`;
  const historyEntry = {
    entries: [{
      timestamp: now,
      action: 'init',
      changes: { type: 'initialize', status: 'pending' }
    }]
  };
  Write(historyPath, JSON.stringify(historyEntry, null, 2));

  console.log(`[StateManager] Initialized state at ${statePath}`);
  return initialState;
}
```
### getState(workDir)

Read and parse current state from file.
```javascript
/**
 * Read current state from file
 * @param {string} workDir - Working directory path
 * @returns {object|null} - Current state or null if not found
 */
function getState(workDir) {
  const statePath = `${workDir}/state.json`;

  try {
    const content = Read(statePath);
    const state = JSON.parse(content);

    // Validate structure before returning
    const validation = validateState(state);
    if (!validation.valid) {
      console.warn(`[StateManager] State validation issues: ${[...validation.errors, ...validation.warnings].join(', ')}`);
    }

    return state;
  } catch (error) {
    console.error(`[StateManager] Failed to read state: ${error.message}`);
    return null;
  }
}
```
### updateState(workDir, updates)

Safely update state with atomic write and automatic backup.
```javascript
/**
 * Safely update state with atomic write
 * @param {string} workDir - Working directory path
 * @param {object} updates - Partial state updates to apply
 * @returns {object} - Updated state object
 * @throws {Error} - If update fails (automatically rolls back)
 */
function updateState(workDir, updates) {
  const statePath = `${workDir}/state.json`;
  const tempPath = `${workDir}/state.tmp.json`;
  const backupPath = `${workDir}/state.backup.json`;
  const historyPath = `${workDir}/state-history.json`;

  // Step 1: Read current state
  let currentState;
  try {
    currentState = JSON.parse(Read(statePath));
  } catch (error) {
    throw new Error(`Cannot read current state: ${error.message}`);
  }

  // Step 2: Create backup before any modification
  try {
    Write(backupPath, JSON.stringify(currentState, null, 2));
  } catch (error) {
    throw new Error(`Cannot create backup: ${error.message}`);
  }

  // Step 3: Merge updates
  const now = new Date().toISOString();
  const newState = deepMerge(currentState, {
    ...updates,
    updated_at: now
  });

  // Step 4: Validate new state
  const validation = validateState(newState);
  if (!validation.valid && validation.errors.length > 0) {
    throw new Error(`Invalid state after update: ${validation.errors.join(', ')}`);
  }

  // Step 5: Write to temp file first (atomic preparation)
  try {
    Write(tempPath, JSON.stringify(newState, null, 2));
  } catch (error) {
    throw new Error(`Cannot write temp state: ${error.message}`);
  }

  // Step 6: Atomic rename (replace original with temp)
  try {
    // Read temp and write to original (simulating atomic rename)
    const tempContent = Read(tempPath);
    Write(statePath, tempContent);

    // Clean up temp file
    Bash(`rm -f "${tempPath}"`);
  } catch (error) {
    // Rollback: restore from backup
    console.error(`[StateManager] Update failed, rolling back: ${error.message}`);
    try {
      const backup = Read(backupPath);
      Write(statePath, backup);
    } catch (rollbackError) {
      throw new Error(`Critical: Update failed and rollback failed: ${rollbackError.message}`);
    }
    throw new Error(`Update failed, rolled back: ${error.message}`);
  }

  // Step 7: Record in history
  try {
    let history = { entries: [] };
    try {
      history = JSON.parse(Read(historyPath));
    } catch (e) {
      // History file may not exist, start fresh
    }

    history.entries.push({
      timestamp: now,
      action: 'update',
      changes: summarizeChanges(currentState, newState, updates)
    });

    // Keep only last 100 entries
    if (history.entries.length > 100) {
      history.entries = history.entries.slice(-100);
    }

    Write(historyPath, JSON.stringify(history, null, 2));
  } catch (error) {
    // History logging failure is non-critical
    console.warn(`[StateManager] Failed to log history: ${error.message}`);
  }

  console.log(`[StateManager] State updated successfully`);
  return newState;
}

/**
 * Deep merge helper - merges nested objects
 */
function deepMerge(target, source) {
  const result = { ...target };

  for (const key of Object.keys(source)) {
    if (source[key] === null || source[key] === undefined) {
      result[key] = source[key];
    } else if (Array.isArray(source[key])) {
      result[key] = source[key];
    } else if (typeof source[key] === 'object' && typeof target[key] === 'object') {
      result[key] = deepMerge(target[key], source[key]);
    } else {
      result[key] = source[key];
    }
  }

  return result;
}

/**
 * Summarize changes for history logging
 */
function summarizeChanges(oldState, newState, updates) {
  const changes = {};

  for (const key of Object.keys(updates)) {
    if (key === 'updated_at') continue;

    const oldVal = oldState[key];
    const newVal = newState[key];

    if (JSON.stringify(oldVal) !== JSON.stringify(newVal)) {
      changes[key] = {
        from: typeof oldVal === 'object' ? '[object]' : oldVal,
        to: typeof newVal === 'object' ? '[object]' : newVal
      };
    }
  }

  return changes;
}
```
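Note the merge semantics of the `deepMerge` helper above: arrays and `null` values from `updates` replace the existing value wholesale, while nested plain objects are merged key by key. A small illustration with hypothetical values (not part of the module):

```javascript
// Hypothetical values, purely to illustrate deepMerge semantics.
const current = {
  findings: { security: [{ id: 'SEC-001' }], testing: [] },
  reviewed_dimensions: ['security']
};

const merged = deepMerge(current, {
  findings: { testing: [{ id: 'TEST-001' }] },    // nested object: merged key by key
  reviewed_dimensions: ['security', 'testing']     // array: replaced wholesale
});

// merged.findings.security is still [{ id: 'SEC-001' }],
// merged.findings.testing is now [{ id: 'TEST-001' }],
// merged.reviewed_dimensions is ['security', 'testing'].
```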
### validateState(state)

Validate state structure against schema.
```javascript
/**
 * Validate state structure
 * @param {object} state - State object to validate
 * @returns {object} - { valid: boolean, errors: string[], warnings: string[] }
 */
function validateState(state) {
  const errors = [];
  const warnings = [];

  // Required fields
  const requiredFields = ['status', 'started_at', 'updated_at'];
  for (const field of requiredFields) {
    if (state[field] === undefined) {
      errors.push(`Missing required field: ${field}`);
    }
  }

  // Status validation
  const validStatuses = ['pending', 'running', 'completed', 'failed', 'user_exit'];
  if (state.status && !validStatuses.includes(state.status)) {
    errors.push(`Invalid status: ${state.status}. Must be one of: ${validStatuses.join(', ')}`);
  }

  // Timestamp format validation
  const timestampFields = ['started_at', 'updated_at', 'completed_at'];
  for (const field of timestampFields) {
    if (state[field] && !isValidISOTimestamp(state[field])) {
      warnings.push(`Invalid timestamp format for ${field}`);
    }
  }

  // Findings structure validation
  if (state.findings) {
    const expectedDimensions = ['correctness', 'readability', 'performance', 'security', 'testing', 'architecture'];
    for (const dim of expectedDimensions) {
      if (!Array.isArray(state.findings[dim])) {
        warnings.push(`findings.${dim} should be an array`);
      }
    }
  }

  // Context validation (when present)
  if (state.context !== null && state.context !== undefined) {
    const contextFields = ['target_path', 'files', 'language', 'total_lines', 'file_count'];
    for (const field of contextFields) {
      if (state.context[field] === undefined) {
        warnings.push(`context.${field} is missing`);
      }
    }
  }

  // Error count validation
  if (typeof state.error_count !== 'number') {
    warnings.push('error_count should be a number');
  }

  // Array fields validation
  const arrayFields = ['reviewed_dimensions', 'completed_actions', 'errors'];
  for (const field of arrayFields) {
    if (state[field] !== undefined && !Array.isArray(state[field])) {
      errors.push(`${field} must be an array`);
    }
  }

  return {
    valid: errors.length === 0,
    errors,
    warnings
  };
}

/**
 * Check if string is valid ISO timestamp
 */
function isValidISOTimestamp(str) {
  if (typeof str !== 'string') return false;
  const date = new Date(str);
  return !isNaN(date.getTime()) && str.includes('T');
}
```
### backupState(workDir)

Create a manual backup of current state.
```javascript
/**
 * Create a manual backup of current state
 * @param {string} workDir - Working directory path
 * @param {string} [suffix] - Optional suffix for backup file name
 * @returns {string} - Backup file path
 */
function backupState(workDir, suffix = null) {
  const statePath = `${workDir}/state.json`;
  const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
  const backupName = suffix
    ? `state.backup.${suffix}.json`
    : `state.backup.${timestamp}.json`;
  const backupPath = `${workDir}/${backupName}`;

  try {
    const content = Read(statePath);
    Write(backupPath, content);
    console.log(`[StateManager] Backup created: ${backupPath}`);
    return backupPath;
  } catch (error) {
    throw new Error(`Failed to create backup: ${error.message}`);
  }
}
```
### restoreState(workDir, backupPath)

Restore state from a backup file.
```javascript
/**
 * Restore state from a backup file
 * @param {string} workDir - Working directory path
 * @param {string} [backupPath] - Path to backup file (default: latest backup)
 * @returns {object} - Restored state object
 */
function restoreState(workDir, backupPath = null) {
  const statePath = `${workDir}/state.json`;
  const defaultBackup = `${workDir}/state.backup.json`;
  const historyPath = `${workDir}/state-history.json`;

  const sourcePath = backupPath || defaultBackup;

  try {
    // Read backup (local name avoids shadowing the backupState() function)
    const backupContent = Read(sourcePath);
    const restoredState = JSON.parse(backupContent);

    // Validate backup state
    const validation = validateState(restoredState);
    if (!validation.valid) {
      throw new Error(`Backup state is invalid: ${validation.errors.join(', ')}`);
    }

    // Create backup of current state before restore (for safety)
    try {
      const currentContent = Read(statePath);
      Write(`${workDir}/state.pre-restore.json`, currentContent);
    } catch (e) {
      // Current state may not exist, that's okay
    }

    // Update timestamp
    const now = new Date().toISOString();
    restoredState.updated_at = now;

    // Write restored state
    Write(statePath, JSON.stringify(restoredState, null, 2));

    // Log to history
    try {
      let history = { entries: [] };
      try {
        history = JSON.parse(Read(historyPath));
      } catch (e) {}

      history.entries.push({
        timestamp: now,
        action: 'restore',
        changes: { source: sourcePath }
      });

      Write(historyPath, JSON.stringify(history, null, 2));
    } catch (e) {
      console.warn(`[StateManager] Failed to log restore to history`);
    }

    console.log(`[StateManager] State restored from ${sourcePath}`);
    return restoredState;
  } catch (error) {
    throw new Error(`Failed to restore state: ${error.message}`);
  }
}
```
## Convenience Functions

### getNextDimension(state)

Get the next dimension to review based on current state.
```javascript
/**
 * Get next dimension to review
 * @param {object} state - Current state
 * @returns {string|null} - Next dimension or null if all reviewed
 */
function getNextDimension(state) {
  const dimensions = ['correctness', 'security', 'performance', 'readability', 'testing', 'architecture'];
  const reviewed = state.reviewed_dimensions || [];

  for (const dim of dimensions) {
    if (!reviewed.includes(dim)) {
      return dim;
    }
  }

  return null;
}
```
### addFinding(workDir, finding)

Add a new finding to the state.
```javascript
/**
 * Add a finding to the appropriate dimension
 * @param {string} workDir - Working directory path
 * @param {object} finding - Finding object (must include dimension field)
 * @returns {object} - Updated state
 */
function addFinding(workDir, finding) {
  if (!finding.dimension) {
    throw new Error('Finding must have a dimension field');
  }

  const state = getState(workDir);
  const dimension = finding.dimension;

  // Generate ID if not provided
  if (!finding.id) {
    const prefixes = {
      correctness: 'CORR',
      readability: 'READ',
      performance: 'PERF',
      security: 'SEC',
      testing: 'TEST',
      architecture: 'ARCH'
    };
    const prefix = prefixes[dimension] || 'MISC';
    const count = (state.findings[dimension]?.length || 0) + 1;
    finding.id = `${prefix}-${String(count).padStart(3, '0')}`;
  }

  const currentFindings = state.findings[dimension] || [];

  return updateState(workDir, {
    findings: {
      ...state.findings,
      [dimension]: [...currentFindings, finding]
    }
  });
}
```
### markDimensionComplete(workDir, dimension)

Mark a dimension as reviewed.
```javascript
/**
 * Mark a dimension as reviewed
 * @param {string} workDir - Working directory path
 * @param {string} dimension - Dimension name
 * @returns {object} - Updated state
 */
function markDimensionComplete(workDir, dimension) {
  const state = getState(workDir);
  const reviewed = state.reviewed_dimensions || [];

  if (reviewed.includes(dimension)) {
    console.warn(`[StateManager] Dimension ${dimension} already marked as reviewed`);
    return state;
  }

  return updateState(workDir, {
    reviewed_dimensions: [...reviewed, dimension],
    current_dimension: null
  });
}
```
### recordError(workDir, action, message)

Record an error in state.
```javascript
/**
 * Record an execution error
 * @param {string} workDir - Working directory path
 * @param {string} action - Action that failed
 * @param {string} message - Error message
 * @returns {object} - Updated state
 */
function recordError(workDir, action, message) {
  const state = getState(workDir);
  const errors = state.errors || [];
  const errorCount = (state.error_count || 0) + 1;

  const newError = {
    action,
    message,
    timestamp: new Date().toISOString()
  };

  const newState = updateState(workDir, {
    errors: [...errors, newError],
    error_count: errorCount
  });

  // Auto-fail if error count exceeds threshold
  if (errorCount >= 3) {
    return updateState(workDir, {
      status: 'failed'
    });
  }

  return newState;
}
```
## Usage Examples

### Initialize and Run Review
```javascript
// Initialize new review session
const workDir = '/path/to/review-session';
const state = initState(workDir);

// Update status to running
updateState(workDir, { status: 'running' });

// After collecting context
updateState(workDir, {
  context: {
    target_path: '/src/auth',
    files: ['auth.ts', 'login.ts'],
    language: 'typescript',
    total_lines: 500,
    file_count: 2
  }
});

// After completing quick scan
updateState(workDir, {
  scan_completed: true,
  scan_summary: {
    risk_areas: [{ file: 'auth.ts', reason: 'Complex logic', priority: 'high' }],
    complexity_score: 7.5,
    quick_issues: []
  }
});
```
### Add Findings During Review
```javascript
// Add a security finding
addFinding(workDir, {
  dimension: 'security',
  severity: 'high',
  category: 'injection',
  file: 'auth.ts',
  line: 45,
  description: 'SQL injection vulnerability',
  recommendation: 'Use parameterized queries'
});

// Mark dimension complete
markDimensionComplete(workDir, 'security');
```
### Error Handling with Rollback
```javascript
try {
  updateState(workDir, {
    status: 'running',
    current_action: 'deep-review'
  });

  // ... do review work ...

} catch (error) {
  // Record error
  recordError(workDir, 'deep-review', error.message);

  // If needed, restore from backup
  restoreState(workDir);
}
```
### Check Review Progress
```javascript
const state = getState(workDir);
const nextDim = getNextDimension(state);

if (nextDim) {
  console.log(`Next dimension to review: ${nextDim}`);
  updateState(workDir, { current_dimension: nextDim });
} else {
  console.log('All dimensions reviewed');
}
```
## Integration with Orchestrator

Update the orchestrator to use StateManager:
```javascript
// In orchestrator.md - Replace direct state operations with StateManager calls

// OLD:
const state = JSON.parse(Read(`${workDir}/state.json`));

// NEW:
const state = getState(workDir);

// OLD:
function updateState(updates) {
  const state = JSON.parse(Read(`${workDir}/state.json`));
  const newState = { ...state, ...updates, updated_at: new Date().toISOString() };
  Write(`${workDir}/state.json`, JSON.stringify(newState, null, 2));
  return newState;
}

// NEW:
// Import from state-manager.md
// updateState(workDir, updates) - handles atomic write, backup, validation

// Error handling - OLD:
updateState({
  errors: [...(state.errors || []), { action: actionId, message: error.message, timestamp: new Date().toISOString() }],
  error_count: (state.error_count || 0) + 1
});

// Error handling - NEW:
recordError(workDir, actionId, error.message);
```
## State History Format

The `state-history.json` file tracks all state changes:
```json
{
  "entries": [
    {
      "timestamp": "2024-01-01T10:00:00.000Z",
      "action": "init",
      "changes": { "type": "initialize", "status": "pending" }
    },
    {
      "timestamp": "2024-01-01T10:01:00.000Z",
      "action": "update",
      "changes": {
        "status": { "from": "pending", "to": "running" },
        "current_action": { "from": null, "to": "action-collect-context" }
      }
    },
    {
      "timestamp": "2024-01-01T10:05:00.000Z",
      "action": "restore",
      "changes": { "source": "/path/state.backup.json" }
    }
  ]
}
```
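For debugging, the history log can be inspected directly. A small helper along these lines (illustrative, not part of the documented API) prints the most recent entries:

```javascript
// Illustrative helper (not part of the documented API): print the last few
// history entries for a review session.
function printRecentHistory(workDir, count = 5) {
  const history = JSON.parse(Read(`${workDir}/state-history.json`));
  for (const entry of history.entries.slice(-count)) {
    console.log(`${entry.timestamp} [${entry.action}]`, JSON.stringify(entry.changes));
  }
}
```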
## Error Recovery Strategies

| Scenario | Strategy | Function |
|----------|----------|----------|
| State file corrupted | Restore from backup | `restoreState(workDir)` |
| Invalid state after update | Auto-rollback (built-in) | N/A (automatic) |
| Multiple errors | Auto-fail after 3 | `recordError()` |
| Need to retry from checkpoint | Restore specific backup | `restoreState(workDir, backupPath)` |
| Review interrupted | Resume from saved state | `getState(workDir)` |
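A typical recovery path when `state.json` cannot be read or parsed, sketched with the functions above (assumes they are available in the same scope):

```javascript
// Sketch: recover a session whose state.json is missing or corrupted.
let state = getState(workDir);      // returns null on read/parse failure
if (!state) {
  console.warn('[StateManager] State unreadable, restoring latest backup');
  state = restoreState(workDir);    // throws if the backup is also invalid
}

// Resume wherever the review left off
const nextDim = getNextDimension(state);
if (nextDim) {
  updateState(workDir, { status: 'running', current_dimension: nextDim });
}
```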
## Best Practices

1. **Always use `updateState()`** - Never write directly to `state.json`
2. **Check validation warnings** - Warnings may indicate data issues
3. **Use convenience functions** - `addFinding()`, `markDimensionComplete()`, etc.
4. **Monitor history** - Check `state-history.json` for debugging
5. **Create named backups** - Before major operations: `backupState(workDir, 'pre-deep-review')`