Mirror of https://github.com/catlog22/Claude-Code-Workflow.git (synced 2026-02-11 02:33:51 +08:00)
feat: enhance multi-CLI planning with new schema for solutions and implementation plans; improve file handling with async methods
@@ -60,24 +60,43 @@ Phase 5: Output Generation
 **Output Path**: `{session.folder}/rounds/{round_number}/synthesis.json`

-### Primary Fields (orchestrator reads these)

 ```json
 {
   "round": 1,
   "solutions": [
     {
       "name": "Solution Name",
-      "description": "What this does",
       "source_cli": ["gemini", "codex"],
-      "pros": ["advantage 1"],
-      "cons": ["disadvantage 1"],
+      "feasibility": 0.85,
       "effort": "low|medium|high",
       "risk": "low|medium|high",
-      "maintainability": "low|medium|high",
-      "performance_impact": "positive|neutral|negative",
-      "affected_files": [{"file": "path", "line": 10, "reason": "why"}],
-      "score": 85
+      "summary": "Brief analysis summary",
+      "implementation_plan": {
+        "approach": "High-level technical approach",
+        "tasks": [
+          {
+            "id": "T1",
+            "name": "Task name",
+            "depends_on": [],
+            "files": [{"file": "path", "line": 10, "action": "modify|create|delete"}],
+            "key_point": "Critical consideration for this task"
+          },
+          {
+            "id": "T2",
+            "name": "Second task",
+            "depends_on": ["T1"],
+            "files": [{"file": "path2", "line": 1, "action": "create"}],
+            "key_point": null
+          }
+        ],
+        "execution_flow": "T1 → T2 → T3 (T2,T3 can parallel after T1)",
+        "milestones": ["Interface defined", "Core logic complete", "Tests passing"]
+      },
+      "dependencies": {
+        "internal": ["@/lib/module"],
+        "external": ["npm:package@version"]
+      },
+      "technical_concerns": ["Potential blocker 1", "Risk area 2"]
     }
   ],
   "convergence": {
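The `depends_on` arrays above are what `execution_flow` strings like `"T1 → T2 → T3 (T2,T3 can parallel after T1)"` summarize. A minimal TypeScript sketch of deriving parallel execution groups from those arrays (an illustration only; `executionGroups` is not part of this commit):

```typescript
// Derive waves of tasks that can run in parallel from depends_on edges.
interface TaskNode { id: string; depends_on: string[] }

function executionGroups(tasks: TaskNode[]): string[][] {
  const groups: string[][] = [];
  const done = new Set<string>();
  let remaining = [...tasks];
  while (remaining.length > 0) {
    // A task is ready once all of its dependencies have completed.
    const ready = remaining.filter(t => t.depends_on.every(d => done.has(d)));
    if (ready.length === 0) break; // guard against dependency cycles
    groups.push(ready.map(t => t.id));
    for (const t of ready) done.add(t.id);
    remaining = remaining.filter(t => !done.has(t.id));
  }
  return groups;
}

// For the schema example above: [["T1"], ["T2"]]
```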
@@ -94,14 +113,21 @@ Phase 5: Output Generation
 }
 ```

-### Extended Fields (for visualization)
+**Schema Fields**:

-- `metadata` - artifactId, timestamp, contributingAgents, durationSeconds
-- `discussionTopic` - title, description, scope, status, tags
-- `relatedFiles` - fileTree, impactSummary
-- `planning` - functional/nonFunctional requirements
-- `decision` - status, selectedSolution, rejectedAlternatives
-- `decisionRecords` - timeline events
+| Field | Purpose |
+|-------|---------|
+| `feasibility` | Quantitative viability score (0-1) |
+| `summary` | Narrative analysis summary |
+| `implementation_plan.approach` | High-level technical strategy |
+| `implementation_plan.tasks[]` | Discrete implementation tasks |
+| `implementation_plan.tasks[].depends_on` | Task dependencies (IDs) |
+| `implementation_plan.tasks[].key_point` | Critical consideration for task |
+| `implementation_plan.execution_flow` | Visual task sequence |
+| `implementation_plan.milestones` | Key checkpoints |
+| `technical_concerns` | Specific risks/blockers |

+**Note**: Solutions ranked by internal scoring (array order = priority). `pros/cons` merged into `summary` and `technical_concerns`.

 ---

@@ -273,7 +299,7 @@ Second+ CLI receives prior analysis for verification:
 3. Combine pros/cons/affected_files from multiple sources
 4. Track source_cli attribution

-**Scoring formula**:
+**Internal scoring** (used for ranking, not exported):
 ```
 score = (source_cli.length × 20) // Multi-CLI consensus
 + effort_score[effort] // low=30, medium=20, high=10
@@ -282,7 +308,7 @@ score = (source_cli.length × 20) // Multi-CLI consensus
 + min(affected_files.length × 3, 15) // Specificity
 ```

-**Output**: Top 3 solutions ranked by score
+**Output**: Top 3 solutions, ranked in array order (highest score first)

 ---

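A sketch of the scoring terms visible in these two hunks, assuming the `effort_score` table from the comments; the diff elides at least one term between the hunks, so this is not the complete formula:

```typescript
// Partial reimplementation of the internal ranking score; elided terms omitted.
const effortScore = { low: 30, medium: 20, high: 10 } as const;

function visibleScoreTerms(s: {
  source_cli: string[];
  effort: keyof typeof effortScore;
  affected_files: unknown[];
}): number {
  return s.source_cli.length * 20                 // Multi-CLI consensus
    + effortScore[s.effort]                       // low=30, medium=20, high=10
    + Math.min(s.affected_files.length * 3, 15);  // Specificity, capped at 15
}
```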
@@ -130,7 +130,7 @@ Task({
 - task_description: ${taskDescription}
 - round_number: ${currentRound}
 - session: { id: "${sessionId}", folder: "${sessionFolder}" }
 - ace_context: ${JSON.stringify(contextPackage)}
 - previous_rounds: ${JSON.stringify(analysisResults)}
 - user_feedback: ${userFeedback || 'None'}
 - cli_config: { tools: ["gemini", "codex"], mode: "parallel", fallback_chain: ["gemini", "codex", "claude"] }
@@ -225,7 +225,96 @@ AskUserQuestion({

 ### Phase 5: Plan Generation

-**Invoke Planning Agent**:
+**Step 1: Build Context-Package** (Orchestrator responsibility):
+```javascript
+// Extract key information from user decision and synthesis
+const contextPackage = {
+  // Core solution details
+  solution: {
+    name: selectedSolution.name,
+    source_cli: selectedSolution.source_cli,
+    feasibility: selectedSolution.feasibility,
+    effort: selectedSolution.effort,
+    risk: selectedSolution.risk,
+    summary: selectedSolution.summary
+  },
+  // Implementation plan (tasks, flow, milestones)
+  implementation_plan: selectedSolution.implementation_plan,
+  // Dependencies
+  dependencies: selectedSolution.dependencies || { internal: [], external: [] },
+  // Technical concerns
+  technical_concerns: selectedSolution.technical_concerns || [],
+  // Consensus from cross-verification
+  consensus: {
+    agreements: synthesis.cross_verification.agreements,
+    resolved_conflicts: synthesis.cross_verification.resolution
+  },
+  // User constraints (from Phase 4 feedback)
+  constraints: userConstraints || [],
+  // Task context
+  task_description: taskDescription,
+  session_id: sessionId
+}
+
+// Write context-package for traceability
+Write(`${sessionFolder}/context-package.json`, JSON.stringify(contextPackage, null, 2))
+```
+
+**Context-Package Schema**:
+
+| Field | Type | Description |
+|-------|------|-------------|
+| `solution` | object | User-selected solution from synthesis |
+| `solution.name` | string | Solution identifier |
+| `solution.feasibility` | number | Viability score (0-1) |
+| `solution.summary` | string | Brief analysis summary |
+| `implementation_plan` | object | Task breakdown with flow and dependencies |
+| `implementation_plan.approach` | string | High-level technical strategy |
+| `implementation_plan.tasks[]` | array | Discrete tasks with id, name, depends_on, files |
+| `implementation_plan.execution_flow` | string | Task sequence (e.g., "T1 → T2 → T3") |
+| `implementation_plan.milestones` | string[] | Key checkpoints |
+| `dependencies` | object | Module and package dependencies |
+| `technical_concerns` | string[] | Risks and blockers |
+| `consensus` | object | Cross-verified agreements from multi-CLI |
+| `constraints` | string[] | User-specified constraints from Phase 4 |
+
+```json
+{
+  "solution": {
+    "name": "Strategy Pattern Refactoring",
+    "source_cli": ["gemini", "codex"],
+    "feasibility": 0.88,
+    "effort": "medium",
+    "risk": "low",
+    "summary": "Extract payment gateway interface, implement strategy pattern for multi-gateway support"
+  },
+  "implementation_plan": {
+    "approach": "Define interface → Create concrete strategies → Implement factory → Migrate existing code",
+    "tasks": [
+      {"id": "T1", "name": "Define PaymentGateway interface", "depends_on": [], "files": [{"file": "src/types/payment.ts", "line": 1, "action": "create"}], "key_point": "Include all existing Stripe methods"},
+      {"id": "T2", "name": "Implement StripeGateway", "depends_on": ["T1"], "files": [{"file": "src/payment/stripe.ts", "line": 1, "action": "create"}], "key_point": "Wrap existing logic"},
+      {"id": "T3", "name": "Create GatewayFactory", "depends_on": ["T1"], "files": [{"file": "src/payment/factory.ts", "line": 1, "action": "create"}], "key_point": null},
+      {"id": "T4", "name": "Migrate processor to use factory", "depends_on": ["T2", "T3"], "files": [{"file": "src/payment/processor.ts", "line": 45, "action": "modify"}], "key_point": "Backward compatible"}
+    ],
+    "execution_flow": "T1 → (T2 | T3) → T4",
+    "milestones": ["Interface defined", "Gateway implementations complete", "Migration done"]
+  },
+  "dependencies": {
+    "internal": ["@/lib/payment-gateway", "@/types/payment"],
+    "external": ["stripe@^14.0.0"]
+  },
+  "technical_concerns": ["Existing tests must pass", "No breaking API changes"],
+  "consensus": {
+    "agreements": ["Use strategy pattern", "Keep existing API"],
+    "resolved_conflicts": "Factory over DI for simpler integration"
+  },
+  "constraints": ["backward compatible", "no breaking changes to PaymentResult type"],
+  "task_description": "Refactor payment processing for multi-gateway support",
+  "session_id": "MCP-payment-refactor-2026-01-14"
+}
+```
+
+**Step 2: Invoke Planning Agent**:
 ```javascript
 Task({
   subagent_type: "cli-lite-planning-agent",
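Because the planning agent consumes this file verbatim, a fail-fast check before Step 2 can catch an incomplete package early. A hedged sketch (the `assertContextPackage` helper and its field list are assumptions, not part of the commit):

```typescript
// Throw before invoking the planning agent if required fields are absent.
function assertContextPackage(pkg: Record<string, unknown>): void {
  const required = ['solution', 'implementation_plan', 'task_description', 'session_id'];
  const missing = required.filter(k => pkg[k] === undefined);
  if (missing.length > 0) {
    throw new Error(`context-package.json missing fields: ${missing.join(', ')}`);
  }
}
```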
@@ -235,29 +324,35 @@ Task({
 ## Schema Reference
 Execute: cat ~/.claude/workflows/cli-templates/schemas/plan-json-schema.json

-## Selected Solution
-${JSON.stringify(selectedSolution)}
+## Context-Package (from orchestrator)
+${JSON.stringify(contextPackage, null, 2)}

-## Analysis Consensus
-${synthesis.cross_verification.agreements.join('\n')}

 ## Execution Process
 1. Read plan-json-schema.json for output structure
 2. Read project-tech.json and project-guidelines.json
-3. Decompose solution into 2-7 tasks (group by feature, not file)
-4. Assign dependencies and execution groups
-5. Generate IMPL_PLAN.md with step-by-step documentation
-6. Generate plan.json following schema exactly
+3. Parse context-package fields:
+   - solution: name, feasibility, summary
+   - implementation_plan: tasks[], execution_flow, milestones
+   - dependencies: internal[], external[]
+   - technical_concerns: risks/blockers
+   - consensus: agreements, resolved_conflicts
+   - constraints: user requirements
+4. Use implementation_plan.tasks[] as task foundation
+5. Preserve task dependencies (depends_on) and execution_flow
+6. Expand tasks with detailed acceptance criteria
+7. Generate IMPL_PLAN.md documenting milestones and key_points
+8. Generate plan.json following schema exactly

 ## Output
 - ${sessionFolder}/IMPL_PLAN.md
 - ${sessionFolder}/plan.json

 ## Completion Checklist
-- [ ] IMPL_PLAN.md written with complete documentation
-- [ ] plan.json follows schema exactly
-- [ ] All affected files have line numbers
-- [ ] Tasks grouped by feature (not one per file)
+- [ ] IMPL_PLAN.md documents approach, milestones, technical_concerns
+- [ ] plan.json preserves task dependencies from implementation_plan
+- [ ] Task execution order follows execution_flow
+- [ ] Key_points reflected in task descriptions
+- [ ] User constraints applied to implementation
 - [ ] Acceptance criteria are testable
 `
 })
@@ -279,6 +374,7 @@ if (userConfirms) {
 │   ├── 1/synthesis.json        # Round 1 analysis (cli-discuss-agent)
 │   ├── 2/synthesis.json        # Round 2 analysis (cli-discuss-agent)
 │   └── .../
+├── context-package.json        # Extracted context for planning (orchestrator)
 ├── IMPL_PLAN.md                # Documentation (cli-lite-planning-agent)
 └── plan.json                   # Structured plan (cli-lite-planning-agent)
 ```
@@ -289,23 +385,32 @@ if (userConfirms) {
 |------|----------|---------|
 | `session-state.json` | Orchestrator | Session metadata, rounds, decisions |
 | `rounds/*/synthesis.json` | cli-discuss-agent | Solutions, convergence, cross-verification |
+| `context-package.json` | Orchestrator | Extracted solution, dependencies, consensus for planning |
 | `IMPL_PLAN.md` | cli-lite-planning-agent | Human-readable plan |
 | `plan.json` | cli-lite-planning-agent | Structured tasks for execution |

 ## synthesis.json Schema

-**Primary Fields** (orchestrator reads these):
 ```json
 {
   "round": 1,
   "solutions": [{
     "name": "Solution Name",
-    "description": "What this does",
     "source_cli": ["gemini", "codex"],
-    "pros": [], "cons": [],
+    "feasibility": 0.85,
     "effort": "low|medium|high",
     "risk": "low|medium|high",
-    "affected_files": [{"file": "path", "line": 10, "reason": "why"}]
+    "summary": "Brief analysis summary",
+    "implementation_plan": {
+      "approach": "High-level technical approach",
+      "tasks": [
+        {"id": "T1", "name": "Task", "depends_on": [], "files": [], "key_point": "..."}
+      ],
+      "execution_flow": "T1 → T2 → T3",
+      "milestones": ["Checkpoint 1", "Checkpoint 2"]
+    },
+    "dependencies": {"internal": [], "external": []},
+    "technical_concerns": ["Risk 1", "Blocker 2"]
   }],
   "convergence": {
     "score": 0.85,
@@ -321,7 +426,17 @@ if (userConfirms) {
 }
 ```

-**Extended Fields** (for visualization): `metadata`, `discussionTopic`, `relatedFiles`, `planning`, `decision`, `decisionRecords`
+**Key Planning Fields**:

+| Field | Purpose |
+|-------|---------|
+| `feasibility` | Viability score (0-1) |
+| `implementation_plan.tasks[]` | Discrete tasks with dependencies |
+| `implementation_plan.execution_flow` | Task sequence visualization |
+| `implementation_plan.milestones` | Key checkpoints |
+| `technical_concerns` | Risks and blockers |

+**Note**: Solutions ranked by internal scoring (array order = priority)

 ## TodoWrite Structure

@@ -195,8 +195,65 @@ async function scanMultiCliDir(dir: string): Promise<LiteSession[]> {
   }
 }

+// NEW Schema types for multi-cli synthesis
+interface SolutionFileAction {
+  file: string;
+  line: number;
+  action: 'modify' | 'create' | 'delete';
+}
+
+interface SolutionTask {
+  id: string;
+  name: string;
+  depends_on: string[];
+  files: SolutionFileAction[];
+  key_point: string | null;
+}
+
+interface SolutionImplementationPlan {
+  approach: string;
+  tasks: SolutionTask[];
+  execution_flow: string;
+  milestones: string[];
+}
+
+interface SolutionDependencies {
+  internal: string[];
+  external: string[];
+}
+
+interface Solution {
+  name: string;
+  source_cli: string[];
+  feasibility: number; // 0-1
+  effort: 'low' | 'medium' | 'high';
+  risk: 'low' | 'medium' | 'high';
+  summary: string;
+  implementation_plan: SolutionImplementationPlan;
+  dependencies: SolutionDependencies;
+  technical_concerns: string[];
+}
+
+interface SynthesisConvergence {
+  score: number;
+  new_insights: boolean;
+  recommendation: 'converged' | 'continue' | 'user_input_needed';
+}
+
+interface SynthesisCrossVerification {
+  agreements: string[];
+  disagreements: string[];
+  resolution: string;
+}
+
 interface RoundSynthesis {
   round: number;
+  // NEW schema fields
+  solutions?: Solution[];
+  convergence?: SynthesisConvergence;
+  cross_verification?: SynthesisCrossVerification;
+  clarification_questions?: string[];
+  // OLD schema fields (backward compatibility)
   converged?: boolean;
   tasks?: unknown[];
   synthesis?: unknown;
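With both schema generations expressed as optional fields on one interface, callers need a discriminator. One possible type guard over the declarations above (a sketch, not part of the commit):

```typescript
// Narrow a RoundSynthesis to the NEW schema when solutions[] is present.
function usesNewSchema(
  s: RoundSynthesis
): s is RoundSynthesis & { solutions: Solution[] } {
  return Array.isArray(s.solutions);
}
```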
@@ -230,31 +287,72 @@ async function loadRoundSyntheses(sessionPath: string): Promise<RoundSynthesis[]
         const synthesis = JSON.parse(content) as RoundSynthesis;
         synthesis.round = roundDir.num;
         syntheses.push(synthesis);
-      } catch {
-        // Skip if synthesis.json doesn't exist or can't be parsed
+      } catch (e) {
+        console.warn('Failed to parse synthesis file:', synthesisPath, (e as Error).message);
       }
     }
-  } catch {
-    // Return empty array if rounds directory doesn't exist
+  } catch (e) {
+    // Ignore ENOENT errors (directory doesn't exist), warn on others
+    if ((e as NodeJS.ErrnoException).code !== 'ENOENT') {
+      console.warn('Failed to read rounds directory:', roundsDir, (e as Error).message);
+    }
   }

   return syntheses;
 }

+// Extended Progress interface for multi-cli sessions
+interface MultiCliProgress extends Progress {
+  convergenceScore?: number;
+  recommendation?: 'converged' | 'continue' | 'user_input_needed';
+  solutionsCount?: number;
+  avgFeasibility?: number;
+}
+
 /**
  * Calculate progress for multi-cli-plan sessions
+ * Uses new convergence.score and convergence.recommendation when available
+ * Falls back to old converged boolean for backward compatibility
  * @param syntheses - Array of round syntheses
- * @returns Progress info
+ * @returns Progress info with convergence metrics
  */
-function calculateMultiCliProgress(syntheses: RoundSynthesis[]): Progress {
+function calculateMultiCliProgress(syntheses: RoundSynthesis[]): MultiCliProgress {
   if (syntheses.length === 0) {
     return { total: 0, completed: 0, percentage: 0 };
   }

   const latestSynthesis = syntheses[syntheses.length - 1];
-  const isConverged = latestSynthesis.converged === true;

-  // Total is based on expected rounds or actual rounds
+  // NEW schema: Use convergence object
+  if (latestSynthesis.convergence) {
+    const { score, recommendation } = latestSynthesis.convergence;
+    const isConverged = recommendation === 'converged';
+
+    // Calculate solutions metrics
+    const solutions = latestSynthesis.solutions || [];
+    const solutionsCount = solutions.length;
+    const avgFeasibility = solutionsCount > 0
+      ? solutions.reduce((sum, s) => sum + (s.feasibility || 0), 0) / solutionsCount
+      : 0;
+
+    // Total is based on rounds, percentage derived from convergence score
+    const total = syntheses.length;
+    const completed = isConverged ? total : Math.max(0, total - 1);
+    const percentage = isConverged ? 100 : Math.round(score * 100);
+
+    return {
+      total,
+      completed,
+      percentage,
+      convergenceScore: score,
+      recommendation,
+      solutionsCount,
+      avgFeasibility: Math.round(avgFeasibility * 100) / 100
+    };
+  }
+
+  // OLD schema: Fallback to converged boolean
+  const isConverged = latestSynthesis.converged === true;
   const total = syntheses.length;
   const completed = isConverged ? total : Math.max(0, total - 1);
   const percentage = isConverged ? 100 : Math.round((completed / Math.max(total, 1)) * 100);
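A worked example of the NEW-schema branch with hypothetical inputs: two rounds, the latest at convergence score 0.72 and not yet converged:

```typescript
const progress = calculateMultiCliProgress([
  { round: 1, convergence: { score: 0.40, new_insights: true, recommendation: 'continue' }, solutions: [] },
  { round: 2, convergence: { score: 0.72, new_insights: true, recommendation: 'continue' }, solutions: [] },
]);
// total = 2, completed = max(0, 2 - 1) = 1, percentage = round(0.72 * 100) = 72
// → { total: 2, completed: 1, percentage: 72, convergenceScore: 0.72,
//     recommendation: 'continue', solutionsCount: 0, avgFeasibility: 0 }
```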
@@ -264,6 +362,8 @@ function calculateMultiCliProgress(syntheses: RoundSynthesis[]): Progress {

 /**
  * Extract tasks from synthesis objects
+ * NEW schema: Extract from solutions[].implementation_plan.tasks
+ * OLD schema: Extract from tasks[] array directly
  * @param syntheses - Array of round syntheses
  * @returns Normalized tasks from latest synthesis
  */
@@ -271,8 +371,33 @@ function extractTasksFromSyntheses(syntheses: RoundSynthesis[]): NormalizedTask[
   if (syntheses.length === 0) return [];

   const latestSynthesis = syntheses[syntheses.length - 1];
-  const tasks = latestSynthesis.tasks;

+  // NEW schema: Extract tasks from solutions
+  if (latestSynthesis.solutions && Array.isArray(latestSynthesis.solutions)) {
+    const allTasks: NormalizedTask[] = [];
+
+    for (const solution of latestSynthesis.solutions) {
+      const implPlan = solution.implementation_plan;
+      if (!implPlan?.tasks || !Array.isArray(implPlan.tasks)) continue;
+
+      for (const task of implPlan.tasks) {
+        const normalizedTask = normalizeSolutionTask(task, solution);
+        if (normalizedTask) {
+          allTasks.push(normalizedTask);
+        }
+      }
+    }
+
+    // Sort by task ID
+    return allTasks.sort((a, b) => {
+      const aNum = parseInt(a.id?.replace(/\D/g, '') || '0');
+      const bNum = parseInt(b.id?.replace(/\D/g, '') || '0');
+      return aNum - bNum;
+    });
+  }
+
+  // OLD schema: Extract from tasks array directly
+  const tasks = latestSynthesis.tasks;
   if (!Array.isArray(tasks)) return [];

   return tasks
@@ -280,6 +405,50 @@ function extractTasksFromSyntheses(syntheses: RoundSynthesis[]): NormalizedTask[
     .filter((task): task is NormalizedTask => task !== null);
 }

+/**
+ * Normalize a solution task from NEW schema to NormalizedTask
+ * @param task - SolutionTask from new schema
+ * @param solution - Parent solution for context
+ * @returns Normalized task
+ */
+function normalizeSolutionTask(task: SolutionTask, solution: Solution): NormalizedTask | null {
+  if (!task || !task.id) return null;
+
+  return {
+    id: task.id,
+    title: task.name || 'Untitled Task',
+    status: (task as unknown as { status?: string }).status || 'pending',
+    meta: {
+      type: 'implementation',
+      agent: null,
+      scope: solution.name || null,
+      module: null
+    },
+    context: {
+      requirements: task.key_point ? [task.key_point] : [],
+      focus_paths: task.files?.map(f => f.file) || [],
+      acceptance: [],
+      depends_on: task.depends_on || []
+    },
+    flow_control: {
+      implementation_approach: task.files?.map((f, i) => ({
+        step: `Step ${i + 1}`,
+        action: `${f.action} ${f.file}${f.line ? ` at line ${f.line}` : ''}`
+      })) || []
+    },
+    _raw: {
+      task,
+      solution: {
+        name: solution.name,
+        source_cli: solution.source_cli,
+        feasibility: solution.feasibility,
+        effort: solution.effort,
+        risk: solution.risk
+      }
+    }
+  };
+}
+
 /**
  * Load plan.json or fix-plan.json from session directory
  * @param sessionPath - Session directory path
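How a schema task flows through `normalizeSolutionTask`, reusing the T1 example data from earlier in this commit (the values are illustrative):

```typescript
const solution: Solution = {
  name: 'Strategy Pattern Refactoring',
  source_cli: ['gemini', 'codex'],
  feasibility: 0.88,
  effort: 'medium',
  risk: 'low',
  summary: 'Extract payment gateway interface',
  implementation_plan: { approach: '', tasks: [], execution_flow: '', milestones: [] },
  dependencies: { internal: [], external: [] },
  technical_concerns: []
};
const task: SolutionTask = {
  id: 'T1',
  name: 'Define PaymentGateway interface',
  depends_on: [],
  files: [{ file: 'src/types/payment.ts', line: 1, action: 'create' }],
  key_point: 'Include all existing Stripe methods'
};
const normalized = normalizeSolutionTask(task, solution);
// normalized?.context.requirements → ['Include all existing Stripe methods']
// normalized?.context.focus_paths  → ['src/types/payment.ts']
// normalized?.flow_control.implementation_approach[0].action
//                                  → 'create src/types/payment.ts at line 1'
```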
@@ -2,10 +2,25 @@
  * Session Routes Module
  * Handles all Session/Task-related API endpoints
  */
-import { readFileSync, writeFileSync, existsSync, readdirSync } from 'fs';
+import { readFileSync, writeFileSync, existsSync } from 'fs';
+import { readFile, readdir, access } from 'fs/promises';
 import { join } from 'path';
 import type { RouteContext } from './types.js';

+/**
+ * Check if a file or directory exists (async version)
+ * @param filePath - Path to check
+ * @returns Promise<boolean>
+ */
+async function fileExists(filePath: string): Promise<boolean> {
+  try {
+    await access(filePath);
+    return true;
+  } catch {
+    return false;
+  }
+}
+
 /**
  * Get session detail data (context, summaries, impl-plan, review, multi-cli)
  * @param {string} sessionPath - Path to session directory
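One payoff of the `access`-based helper over `existsSync` is that independent existence checks no longer block the event loop and can run concurrently, e.g. (sketch; `sessionPath` is a hypothetical variable):

```typescript
const [hasPlan, hasImplPlan] = await Promise.all([
  fileExists(join(sessionPath, 'plan.json')),
  fileExists(join(sessionPath, 'IMPL_PLAN.md')),
]);
```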
@@ -23,14 +38,15 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
   if (dataType === 'context' || dataType === 'all') {
     // Try .process/context-package.json first (common location)
     let contextFile = join(normalizedPath, '.process', 'context-package.json');
-    if (!existsSync(contextFile)) {
+    if (!(await fileExists(contextFile))) {
       // Fallback to session root
       contextFile = join(normalizedPath, 'context-package.json');
     }
-    if (existsSync(contextFile)) {
+    if (await fileExists(contextFile)) {
       try {
-        result.context = JSON.parse(readFileSync(contextFile, 'utf8'));
+        result.context = JSON.parse(await readFile(contextFile, 'utf8'));
       } catch (e) {
+        console.warn('Failed to parse context file:', contextFile, (e as Error).message);
         result.context = null;
       }
     }
@@ -40,18 +56,18 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
   if (dataType === 'tasks' || dataType === 'all') {
     const taskDir = join(normalizedPath, '.task');
     result.tasks = [];
-    if (existsSync(taskDir)) {
-      const files = readdirSync(taskDir).filter(f => f.endsWith('.json') && f.startsWith('IMPL-'));
+    if (await fileExists(taskDir)) {
+      const files = (await readdir(taskDir)).filter(f => f.endsWith('.json') && f.startsWith('IMPL-'));
       for (const file of files) {
         try {
-          const content = JSON.parse(readFileSync(join(taskDir, file), 'utf8'));
+          const content = JSON.parse(await readFile(join(taskDir, file), 'utf8'));
           result.tasks.push({
             filename: file,
             task_id: file.replace('.json', ''),
             ...content
           });
         } catch (e) {
-          // Skip unreadable files
+          console.warn('Failed to parse task file:', join(taskDir, file), (e as Error).message);
         }
       }
       // Sort by task ID
@@ -63,14 +79,14 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
   if (dataType === 'summary' || dataType === 'all') {
     const summariesDir = join(normalizedPath, '.summaries');
     result.summaries = [];
-    if (existsSync(summariesDir)) {
-      const files = readdirSync(summariesDir).filter(f => f.endsWith('.md'));
+    if (await fileExists(summariesDir)) {
+      const files = (await readdir(summariesDir)).filter(f => f.endsWith('.md'));
       for (const file of files) {
         try {
-          const content = readFileSync(join(summariesDir, file), 'utf8');
+          const content = await readFile(join(summariesDir, file), 'utf8');
           result.summaries.push({ name: file.replace('.md', ''), content });
         } catch (e) {
-          // Skip unreadable files
+          console.warn('Failed to read summary file:', join(summariesDir, file), (e as Error).message);
         }
       }
     }
@@ -79,10 +95,11 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
   // Load plan.json (for lite tasks)
   if (dataType === 'plan' || dataType === 'all') {
     const planFile = join(normalizedPath, 'plan.json');
-    if (existsSync(planFile)) {
+    if (await fileExists(planFile)) {
       try {
-        result.plan = JSON.parse(readFileSync(planFile, 'utf8'));
+        result.plan = JSON.parse(await readFile(planFile, 'utf8'));
       } catch (e) {
+        console.warn('Failed to parse plan file:', planFile, (e as Error).message);
         result.plan = null;
       }
     }
@@ -100,52 +117,54 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
     ];

     for (const searchDir of searchDirs) {
-      if (!existsSync(searchDir)) continue;
+      if (!(await fileExists(searchDir))) continue;

       // Look for explorations-manifest.json
       const manifestFile = join(searchDir, 'explorations-manifest.json');
-      if (existsSync(manifestFile)) {
+      if (await fileExists(manifestFile)) {
         try {
-          result.explorations.manifest = JSON.parse(readFileSync(manifestFile, 'utf8'));
+          result.explorations.manifest = JSON.parse(await readFile(manifestFile, 'utf8'));

           // Load each exploration file based on manifest
           const explorations = result.explorations.manifest.explorations || [];
           for (const exp of explorations) {
             const expFile = join(searchDir, exp.file);
-            if (existsSync(expFile)) {
+            if (await fileExists(expFile)) {
               try {
-                result.explorations.data[exp.angle] = JSON.parse(readFileSync(expFile, 'utf8'));
+                result.explorations.data[exp.angle] = JSON.parse(await readFile(expFile, 'utf8'));
               } catch (e) {
-                // Skip unreadable exploration files
+                console.warn('Failed to parse exploration file:', expFile, (e as Error).message);
               }
             }
           }
           break; // Found manifest, stop searching
         } catch (e) {
+          console.warn('Failed to parse explorations manifest:', manifestFile, (e as Error).message);
           result.explorations.manifest = null;
         }
       }

       // Look for diagnoses-manifest.json
       const diagManifestFile = join(searchDir, 'diagnoses-manifest.json');
-      if (existsSync(diagManifestFile)) {
+      if (await fileExists(diagManifestFile)) {
         try {
-          result.diagnoses.manifest = JSON.parse(readFileSync(diagManifestFile, 'utf8'));
+          result.diagnoses.manifest = JSON.parse(await readFile(diagManifestFile, 'utf8'));

           // Load each diagnosis file based on manifest
           const diagnoses = result.diagnoses.manifest.diagnoses || [];
           for (const diag of diagnoses) {
             const diagFile = join(searchDir, diag.file);
-            if (existsSync(diagFile)) {
+            if (await fileExists(diagFile)) {
               try {
-                result.diagnoses.data[diag.angle] = JSON.parse(readFileSync(diagFile, 'utf8'));
+                result.diagnoses.data[diag.angle] = JSON.parse(await readFile(diagFile, 'utf8'));
               } catch (e) {
-                // Skip unreadable diagnosis files
+                console.warn('Failed to parse diagnosis file:', diagFile, (e as Error).message);
               }
             }
           }
           break; // Found manifest, stop searching
         } catch (e) {
+          console.warn('Failed to parse diagnoses manifest:', diagManifestFile, (e as Error).message);
           result.diagnoses.manifest = null;
         }
       }
@@ -153,7 +172,7 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
       // Fallback: scan for exploration-*.json and diagnosis-*.json files directly
       if (!result.explorations.manifest) {
         try {
-          const expFiles = readdirSync(searchDir).filter(f => f.startsWith('exploration-') && f.endsWith('.json') && f !== 'explorations-manifest.json');
+          const expFiles = (await readdir(searchDir)).filter(f => f.startsWith('exploration-') && f.endsWith('.json') && f !== 'explorations-manifest.json');
           if (expFiles.length > 0) {
             // Create synthetic manifest
             result.explorations.manifest = {
@@ -169,21 +188,21 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
             for (const file of expFiles) {
               const angle = file.replace('exploration-', '').replace('.json', '');
               try {
-                result.explorations.data[angle] = JSON.parse(readFileSync(join(searchDir, file), 'utf8'));
+                result.explorations.data[angle] = JSON.parse(await readFile(join(searchDir, file), 'utf8'));
               } catch (e) {
-                // Skip unreadable files
+                console.warn('Failed to parse exploration file:', join(searchDir, file), (e as Error).message);
               }
             }
           }
         } catch (e) {
-          // Directory read failed
+          console.warn('Failed to read explorations directory:', searchDir, (e as Error).message);
         }
       }

       // Fallback: scan for diagnosis-*.json files directly
       if (!result.diagnoses.manifest) {
         try {
-          const diagFiles = readdirSync(searchDir).filter(f => f.startsWith('diagnosis-') && f.endsWith('.json') && f !== 'diagnoses-manifest.json');
+          const diagFiles = (await readdir(searchDir)).filter(f => f.startsWith('diagnosis-') && f.endsWith('.json') && f !== 'diagnoses-manifest.json');
           if (diagFiles.length > 0) {
             // Create synthetic manifest
             result.diagnoses.manifest = {
@@ -199,14 +218,14 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
             for (const file of diagFiles) {
               const angle = file.replace('diagnosis-', '').replace('.json', '');
               try {
-                result.diagnoses.data[angle] = JSON.parse(readFileSync(join(searchDir, file), 'utf8'));
+                result.diagnoses.data[angle] = JSON.parse(await readFile(join(searchDir, file), 'utf8'));
               } catch (e) {
-                // Skip unreadable files
+                console.warn('Failed to parse diagnosis file:', join(searchDir, file), (e as Error).message);
               }
             }
           }
         } catch (e) {
-          // Directory read failed
+          console.warn('Failed to read diagnoses directory:', searchDir, (e as Error).message);
         }
       }

@@ -228,12 +247,12 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
     ];

     for (const conflictFile of conflictFiles) {
-      if (existsSync(conflictFile)) {
+      if (await fileExists(conflictFile)) {
         try {
-          result.conflictResolution = JSON.parse(readFileSync(conflictFile, 'utf8'));
+          result.conflictResolution = JSON.parse(await readFile(conflictFile, 'utf8'));
           break; // Found file, stop searching
         } catch (e) {
-          // Skip unreadable file
+          console.warn('Failed to parse conflict resolution file:', conflictFile, (e as Error).message);
         }
       }
     }
@@ -242,27 +261,60 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
   // Load IMPL_PLAN.md
   if (dataType === 'impl-plan' || dataType === 'all') {
     const implPlanFile = join(normalizedPath, 'IMPL_PLAN.md');
-    if (existsSync(implPlanFile)) {
+    if (await fileExists(implPlanFile)) {
       try {
-        result.implPlan = readFileSync(implPlanFile, 'utf8');
+        result.implPlan = await readFile(implPlanFile, 'utf8');
       } catch (e) {
+        console.warn('Failed to read IMPL_PLAN.md:', implPlanFile, (e as Error).message);
         result.implPlan = null;
       }
     }
   }

   // Load multi-cli discussion rounds (rounds/*/synthesis.json)
+  // Supports both NEW and OLD schema formats
   if (dataType === 'multi-cli' || dataType === 'discussions' || dataType === 'all') {
     result.multiCli = {
       sessionId: normalizedPath.split('/').pop() || '',
       type: 'multi-cli-plan',
-      rounds: [] as Array<{ roundNumber: number; synthesis: Record<string, unknown> | null }>
+      rounds: [] as Array<{
+        roundNumber: number;
+        synthesis: Record<string, unknown> | null;
+        // NEW schema extracted fields
+        solutions?: Array<{
+          name: string;
+          source_cli: string[];
+          feasibility: number;
+          effort: string;
+          risk: string;
+          summary: string;
+          tasksCount: number;
+          dependencies: { internal: string[]; external: string[] };
+          technical_concerns: string[];
+        }>;
+        convergence?: {
+          score: number;
+          new_insights: boolean;
+          recommendation: string;
+        };
+        cross_verification?: {
+          agreements: string[];
+          disagreements: string[];
+          resolution: string;
+        };
+        clarification_questions?: string[];
+      }>,
+      // Aggregated data from latest synthesis
+      latestSolutions: [] as Array<Record<string, unknown>>,
+      latestConvergence: null as Record<string, unknown> | null,
+      latestCrossVerification: null as Record<string, unknown> | null,
+      clarificationQuestions: [] as string[]
     };

     const roundsDir = join(normalizedPath, 'rounds');
-    if (existsSync(roundsDir)) {
+    if (await fileExists(roundsDir)) {
       try {
-        const roundDirs = readdirSync(roundsDir)
+        const roundDirs = (await readdir(roundsDir))
           .filter(d => /^\d+$/.test(d)) // Only numeric directories
           .sort((a, b) => parseInt(a) - parseInt(b));

@@ -270,21 +322,84 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
           const synthesisFile = join(roundsDir, roundDir, 'synthesis.json');
           let synthesis: Record<string, unknown> | null = null;

-          if (existsSync(synthesisFile)) {
+          if (await fileExists(synthesisFile)) {
             try {
-              synthesis = JSON.parse(readFileSync(synthesisFile, 'utf8'));
+              synthesis = JSON.parse(await readFile(synthesisFile, 'utf8'));
             } catch (e) {
-              // Skip unreadable synthesis files
+              console.warn('Failed to parse synthesis file:', synthesisFile, (e as Error).message);
             }
           }

-          result.multiCli.rounds.push({
+          // Build round data with NEW schema fields extracted
+          const roundData: any = {
             roundNumber: parseInt(roundDir),
             synthesis
-          });
+          };
+
+          // Extract NEW schema fields if present
+          if (synthesis) {
+            // Extract solutions with summary info
+            if (Array.isArray(synthesis.solutions)) {
+              roundData.solutions = (synthesis.solutions as Array<Record<string, any>>).map(s => ({
+                name: s.name || '',
+                source_cli: s.source_cli || [],
+                feasibility: s.feasibility ?? 0,
+                effort: s.effort || 'unknown',
+                risk: s.risk || 'unknown',
+                summary: s.summary || '',
+                tasksCount: s.implementation_plan?.tasks?.length || 0,
+                dependencies: s.dependencies || { internal: [], external: [] },
+                technical_concerns: s.technical_concerns || []
+              }));
+            }
+
+            // Extract convergence
+            if (synthesis.convergence && typeof synthesis.convergence === 'object') {
+              const conv = synthesis.convergence as Record<string, unknown>;
+              roundData.convergence = {
+                score: conv.score ?? 0,
+                new_insights: conv.new_insights ?? false,
+                recommendation: conv.recommendation || 'unknown'
+              };
+            }
+
+            // Extract cross_verification
+            if (synthesis.cross_verification && typeof synthesis.cross_verification === 'object') {
+              const cv = synthesis.cross_verification as Record<string, unknown>;
+              roundData.cross_verification = {
+                agreements: Array.isArray(cv.agreements) ? cv.agreements : [],
+                disagreements: Array.isArray(cv.disagreements) ? cv.disagreements : [],
+                resolution: (cv.resolution as string) || ''
+              };
+            }
+
+            // Extract clarification_questions
+            if (Array.isArray(synthesis.clarification_questions)) {
+              roundData.clarification_questions = synthesis.clarification_questions;
+            }
+          }
+
+          result.multiCli.rounds.push(roundData);
+        }
+
+        // Populate aggregated data from latest round
+        if (result.multiCli.rounds.length > 0) {
+          const latestRound = result.multiCli.rounds[result.multiCli.rounds.length - 1];
+          if (latestRound.solutions) {
+            result.multiCli.latestSolutions = latestRound.solutions;
+          }
+          if (latestRound.convergence) {
+            result.multiCli.latestConvergence = latestRound.convergence;
+          }
+          if (latestRound.cross_verification) {
+            result.multiCli.latestCrossVerification = latestRound.cross_verification;
+          }
+          if (latestRound.clarification_questions) {
+            result.multiCli.clarificationQuestions = latestRound.clarification_questions;
+          }
         }
       } catch (e) {
-        // Directory read failed
+        console.warn('Failed to read rounds directory:', roundsDir, (e as Error).message);
       }
     }
   }
@@ -299,12 +414,12 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
       totalFindings: 0
     };

-    if (existsSync(reviewDir)) {
+    if (await fileExists(reviewDir)) {
       // Load review-state.json
       const stateFile = join(reviewDir, 'review-state.json');
-      if (existsSync(stateFile)) {
+      if (await fileExists(stateFile)) {
         try {
-          const state = JSON.parse(readFileSync(stateFile, 'utf8'));
+          const state = JSON.parse(await readFile(stateFile, 'utf8'));
           result.review.state = state;
           result.review.severityDistribution = state.severity_distribution || {};
           result.review.totalFindings = state.total_findings || 0;
@@ -313,18 +428,18 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
           result.review.crossCuttingConcerns = state.cross_cutting_concerns || [];
           result.review.criticalFiles = state.critical_files || [];
         } catch (e) {
-          // Skip unreadable state
+          console.warn('Failed to parse review state file:', stateFile, (e as Error).message);
         }
       }

       // Load dimension findings
       const dimensionsDir = join(reviewDir, 'dimensions');
-      if (existsSync(dimensionsDir)) {
-        const files = readdirSync(dimensionsDir).filter(f => f.endsWith('.json'));
+      if (await fileExists(dimensionsDir)) {
+        const files = (await readdir(dimensionsDir)).filter(f => f.endsWith('.json'));
         for (const file of files) {
           try {
             const dimName = file.replace('.json', '');
-            const data = JSON.parse(readFileSync(join(dimensionsDir, file), 'utf8'));
+            const data = JSON.parse(await readFile(join(dimensionsDir, file), 'utf8'));

             // Handle array structure: [ { findings: [...] } ]
             let findings = [];
@@ -346,7 +461,7 @@ async function getSessionDetailData(sessionPath: string, dataType: string): Prom
               count: findings.length
             });
           } catch (e) {
-            // Skip unreadable files
+            console.warn('Failed to parse review dimension file:', join(dimensionsDir, file), (e as Error).message);
           }
         }
       }