mirror of
https://github.com/catlog22/Claude-Code-Workflow.git
synced 2026-03-29 20:11:04 +08:00
feat: add json_builder tool with schema-aware JSON construction and validation
Unified tool replacing manual schema reading (cat schema) across all agents and skills. Supports 5 commands: init (skeleton), set (incremental field setting with instant validation), validate (full structural + semantic), merge (dedup multiple JSONs), info (compact schema summary). Registers 24 schemas in schema-registry.ts. Updates all agent/skill/command files to use json_builder info/init/validate instead of cat schema references. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -829,6 +829,12 @@ Generate at `.workflow/active/{session_id}/plan.json` following `plan-overview-b
|
||||
|
||||
**Generation Timing**: After all `.task/IMPL-*.json` files are generated, aggregate into plan.json.
|
||||
|
||||
**Validation**: After writing plan.json and task files, validate with json_builder:
|
||||
```bash
|
||||
ccw tool exec json_builder '{"cmd":"validate","target":"<session>/plan.json","schema":"plan"}'
|
||||
ccw tool exec json_builder '{"cmd":"validate","target":"<session>/.task/IMPL-001.json","schema":"task"}'
|
||||
```
|
||||
|
||||
### 2.3 IMPL_PLAN.md Structure
|
||||
|
||||
**Template-Based Generation**:
|
||||
|
||||
@@ -5,6 +5,7 @@ description: |
|
||||
Orchestrates 4-phase workflow: Task Understanding → Analysis Execution → Schema Validation → Output Generation.
|
||||
Spawned by /explore command orchestrator.
|
||||
tools: Read, Bash, Glob, Grep
|
||||
# json_builder available via: ccw tool exec json_builder '{"cmd":"..."}' (Bash)
|
||||
color: yellow
|
||||
---
|
||||
|
||||
@@ -66,9 +67,9 @@ Phase 4: Output Generation
|
||||
Store result as `project_structure` for module-aware file discovery in Phase 2.
|
||||
|
||||
2. **Output Schema Loading** (if output file path specified in prompt):
|
||||
- Exploration output → `cat ~/.ccw/workflows/cli-templates/schemas/explore-json-schema.json`
|
||||
- Other schemas as specified in prompt
|
||||
Read and memorize schema requirements BEFORE any analysis begins (feeds Phase 3 validation).
|
||||
- Get schema summary: `ccw tool exec json_builder '{"cmd":"info","schema":"explore"}'` (or "diagnosis" for bug analysis)
|
||||
- Initialize output file: `ccw tool exec json_builder '{"cmd":"init","schema":"explore","output":"<output_path>"}'`
|
||||
- The tool returns requiredFields, arrayFields, and enumFields — memorize these for Phase 2.
|
||||
|
||||
3. **Project Context Loading** (from spec system):
|
||||
- Load exploration specs using: `ccw spec load --category exploration`
|
||||
@@ -150,55 +151,56 @@ RULES: {from prompt, if template specified} | analysis=READ-ONLY
|
||||
---
|
||||
|
||||
<schema_validation>
|
||||
## Phase 3: Schema Validation
|
||||
## Phase 3: Incremental Build & Validation (via json_builder)
|
||||
|
||||
### CRITICAL: Schema Compliance Protocol
|
||||
**This phase replaces manual JSON writing + self-validation with tool-assisted construction.**
|
||||
|
||||
**This phase is MANDATORY when schema file is specified in prompt.**
|
||||
|
||||
**Step 1: Read Schema FIRST**
|
||||
```
|
||||
Read(schema_file_path)
|
||||
**Step 1: Set text fields** (discovered during Phase 2 analysis)
|
||||
```bash
|
||||
ccw tool exec json_builder '{"cmd":"set","target":"<output_path>","ops":[
|
||||
{"path":"project_structure","value":"..."},
|
||||
{"path":"patterns","value":"..."},
|
||||
{"path":"dependencies","value":"..."},
|
||||
{"path":"integration_points","value":"..."},
|
||||
{"path":"constraints","value":"..."}
|
||||
]}'
|
||||
```
|
||||
|
||||
**Step 2: Extract Schema Requirements**
|
||||
**Step 2: Append file entries** (as discovered — one `set` per batch)
|
||||
```bash
|
||||
ccw tool exec json_builder '{"cmd":"set","target":"<output_path>","ops":[
|
||||
{"path":"relevant_files[+]","value":{"path":"src/auth.ts","relevance":0.9,"rationale":"Contains AuthService.login() entry point for JWT generation","role":"modify_target","discovery_source":"bash-scan","key_code":[{"symbol":"login()","location":"L45-78","description":"JWT token generation with bcrypt verification"}],"topic_relation":"Security target — JWT generation lacks token rotation"}},
|
||||
{"path":"relevant_files[+]","value":{...}}
|
||||
]}'
|
||||
```
|
||||
|
||||
Parse and memorize:
|
||||
1. **Root structure** - Is it array `[...]` or object `{...}`?
|
||||
2. **Required fields** - List all `"required": [...]` arrays
|
||||
3. **Field names EXACTLY** - Copy character-by-character (case-sensitive)
|
||||
4. **Enum values** - Copy exact strings (e.g., `"critical"` not `"Critical"`)
|
||||
5. **Nested structures** - Note flat vs nested requirements
|
||||
The tool **automatically validates** each operation:
|
||||
- enum values (role, discovery_source) → rejects invalid
|
||||
- minLength (rationale >= 10) → rejects too short
|
||||
- type checking → rejects wrong types
|
||||
|
||||
**Step 3: File Rationale Validation** (MANDATORY for relevant_files / affected_files)
|
||||
**Step 3: Set metadata**
|
||||
```bash
|
||||
ccw tool exec json_builder '{"cmd":"set","target":"<output_path>","ops":[
|
||||
{"path":"_metadata.timestamp","value":"auto"},
|
||||
{"path":"_metadata.task_description","value":"..."},
|
||||
{"path":"_metadata.source","value":"cli-explore-agent"},
|
||||
{"path":"_metadata.exploration_angle","value":"..."},
|
||||
{"path":"_metadata.exploration_index","value":1},
|
||||
{"path":"_metadata.total_explorations","value":2}
|
||||
]}'
|
||||
```
|
||||
|
||||
Every file entry MUST have:
|
||||
- `rationale` (required, minLength 10): Specific reason tied to the exploration topic, NOT generic
|
||||
- GOOD: "Contains AuthService.login() which is the entry point for JWT token generation"
|
||||
- BAD: "Related to auth" or "Relevant file"
|
||||
- `role` (required, enum): Structural classification of why it was selected
|
||||
- `discovery_source` (optional but recommended): How the file was found
|
||||
- `key_code` (strongly recommended for relevance >= 0.7): Array of {symbol, location?, description}
|
||||
- GOOD: [{"symbol": "AuthService.login()", "location": "L45-L78", "description": "JWT token generation with bcrypt verification, returns token pair"}]
|
||||
- BAD: [{"symbol": "login", "description": "login function"}]
|
||||
- `topic_relation` (strongly recommended for relevance >= 0.7): Connection from exploration angle perspective
|
||||
- GOOD: "Security exploration targets this file because JWT generation lacks token rotation"
|
||||
- BAD: "Related to security"
|
||||
**Step 4: Final validation**
|
||||
```bash
|
||||
ccw tool exec json_builder '{"cmd":"validate","target":"<output_path>"}'
|
||||
```
|
||||
Returns `{valid, errors, warnings, stats}`. If errors exist → fix with `set` → re-validate.
|
||||
|
||||
**Step 4: Pre-Output Validation Checklist**
|
||||
|
||||
Before writing ANY JSON output, verify:
|
||||
|
||||
- [ ] Root structure matches schema (array vs object)
|
||||
- [ ] ALL required fields present at each level
|
||||
- [ ] Field names EXACTLY match schema (character-by-character)
|
||||
- [ ] Enum values EXACTLY match schema (case-sensitive)
|
||||
- [ ] Nested structures follow schema pattern (flat vs nested)
|
||||
- [ ] Data types correct (string, integer, array, object)
|
||||
- [ ] Every file in relevant_files has: path + relevance + rationale + role
|
||||
- [ ] Every rationale is specific (>10 chars, not generic)
|
||||
- [ ] Files with relevance >= 0.7 have key_code with symbol + description (minLength 10)
|
||||
- [ ] Files with relevance >= 0.7 have topic_relation explaining connection to angle (minLength 15)
|
||||
**Quality reminders** (enforced by tool, but be aware):
|
||||
- `rationale`: Must be specific, not generic ("Related to auth" → rejected by semantic check)
|
||||
- `key_code`: Strongly recommended for relevance >= 0.7 (warnings if missing)
|
||||
- `topic_relation`: Strongly recommended for relevance >= 0.7 (warnings if missing)
|
||||
</schema_validation>
|
||||
|
||||
---
|
||||
@@ -212,16 +214,12 @@ Brief summary:
|
||||
- Task completion status
|
||||
- Key findings summary
|
||||
- Generated file paths (if any)
|
||||
- Validation result (from Phase 3 Step 4)
|
||||
|
||||
### File Output (as specified in prompt)
|
||||
### File Output
|
||||
|
||||
**MANDATORY WORKFLOW**:
|
||||
|
||||
1. `Read()` schema file BEFORE generating output
|
||||
2. Extract ALL field names from schema
|
||||
3. Build JSON using ONLY schema field names
|
||||
4. Validate against checklist before writing
|
||||
5. Write file with validated content
|
||||
File is already written by json_builder during Phase 3 (init + set operations).
|
||||
Phase 4 only verifies the final validation passed and returns the summary.
|
||||
</output_generation>
|
||||
|
||||
---
|
||||
@@ -243,28 +241,19 @@ Brief summary:
|
||||
|
||||
**ALWAYS**:
|
||||
1. **Search Tool Priority**: ACE (`mcp__ace-tool__search_context`) → CCW (`mcp__ccw-tools__smart_search`) / Built-in (`Grep`, `Glob`, `Read`)
|
||||
2. Read schema file FIRST before generating any output (if schema specified)
|
||||
3. Copy field names EXACTLY from schema (case-sensitive)
|
||||
4. Verify root structure matches schema (array vs object)
|
||||
5. Match nested/flat structures as schema requires
|
||||
6. Use exact enum values from schema (case-sensitive)
|
||||
7. Include ALL required fields at every level
|
||||
8. Include file:line references in findings
|
||||
9. **Every file MUST have rationale**: Specific selection basis tied to the topic (not generic)
|
||||
10. **Every file MUST have role**: Classify as modify_target/dependency/pattern_reference/test_target/type_definition/integration_point/config/context_only
|
||||
11. **Track discovery source**: Record how each file was found (bash-scan/cli-analysis/ace-search/dependency-trace/manual)
|
||||
12. **Populate key_code for high-relevance files**: relevance >= 0.7 → key_code array with symbol, location, description
|
||||
13. **Populate topic_relation for high-relevance files**: relevance >= 0.7 → topic_relation explaining file-to-angle connection
|
||||
2. **Use json_builder** for all JSON output: `init` → `set` (incremental) → `validate`
|
||||
3. Include file:line references in findings
|
||||
4. **Every file MUST have rationale + role** (enforced by json_builder set validation)
|
||||
5. **Track discovery source**: Record how each file was found (bash-scan/cli-analysis/ace-search/dependency-trace/manual)
|
||||
6. **Populate key_code + topic_relation for high-relevance files** (relevance >= 0.7; json_builder warns if missing)
|
||||
|
||||
**Bash Tool**:
|
||||
- Use `run_in_background=false` for all Bash/CLI calls to ensure foreground execution
|
||||
|
||||
**NEVER**:
|
||||
1. Modify any files (read-only agent)
|
||||
2. Skip schema reading step when schema is specified
|
||||
3. Guess field names - ALWAYS copy from schema
|
||||
4. Assume structure - ALWAYS verify against schema
|
||||
5. Omit required fields
|
||||
1. Modify any source code files (read-only agent — json_builder writes only output JSON)
|
||||
2. Hand-write JSON output — always use json_builder
|
||||
3. Skip the `validate` step before returning
|
||||
</operational_rules>
|
||||
|
||||
<output_contract>
|
||||
@@ -282,11 +271,8 @@ When exploration is complete, return one of:
|
||||
|
||||
Before returning, verify:
|
||||
- [ ] All 4 phases were executed (or skipped with justification)
|
||||
- [ ] Schema was read BEFORE output generation (if schema specified)
|
||||
- [ ] All field names match schema exactly (case-sensitive)
|
||||
- [ ] Every file entry has rationale (specific, >10 chars) and role
|
||||
- [ ] High-relevance files (>= 0.7) have key_code and topic_relation
|
||||
- [ ] json_builder `init` was called at start
|
||||
- [ ] json_builder `validate` returned `valid: true` (or all errors were fixed)
|
||||
- [ ] Discovery sources are tracked for all findings
|
||||
- [ ] No files were modified (read-only agent)
|
||||
- [ ] Output format matches schema root structure (array vs object)
|
||||
- [ ] No source code files were modified (read-only agent)
|
||||
</quality_gate>
|
||||
|
||||
@@ -139,16 +139,15 @@ When `process_docs: true`, generate planning-context.md before sub-plan.json:
|
||||
|
||||
## Schema-Driven Output
|
||||
|
||||
**CRITICAL**: Read the schema reference first to determine output structure:
|
||||
- `plan-overview-base-schema.json` → Implementation plan with `approach`, `complexity`
|
||||
- `plan-overview-fix-schema.json` → Fix plan with `root_cause`, `severity`, `risk_level`
|
||||
**CRITICAL**: Get schema info via json_builder to determine output structure:
|
||||
- `ccw tool exec json_builder '{"cmd":"info","schema":"plan"}'` → Implementation plan with `approach`, `complexity`
|
||||
- `ccw tool exec json_builder '{"cmd":"info","schema":"plan-fix"}'` → Fix plan with `root_cause`, `severity`, `risk_level`
|
||||
|
||||
```javascript
|
||||
// Step 1: Always read schema first
|
||||
const schema = Bash(`cat ${schema_path}`)
|
||||
|
||||
// Step 2: Generate plan conforming to schema
|
||||
const planObject = generatePlanFromSchema(schema, context)
|
||||
After generating plan.json and .task/*.json, validate:
|
||||
```bash
|
||||
ccw tool exec json_builder '{"cmd":"validate","target":"<session>/plan.json","schema":"plan"}'
|
||||
# For each task file:
|
||||
ccw tool exec json_builder '{"cmd":"validate","target":"<session>/.task/TASK-001.json","schema":"task"}'
|
||||
```
|
||||
|
||||
</schema_driven_output>
|
||||
@@ -863,7 +862,7 @@ function validateTask(task) {
|
||||
|
||||
**ALWAYS**:
|
||||
- **Search Tool Priority**: ACE (`mcp__ace-tool__search_context`) → CCW (`mcp__ccw-tools__smart_search`) / Built-in (`Grep`, `Glob`, `Read`)
|
||||
- **Read schema first** to determine output structure
|
||||
- **Get schema info via json_builder** to determine output structure
|
||||
- Generate task IDs (TASK-001/TASK-002 for plan, FIX-001/FIX-002 for fix-plan)
|
||||
- Include depends_on (even if empty [])
|
||||
- **Assign cli_execution_id** (`{sessionId}-{taskId}`)
|
||||
@@ -981,7 +980,7 @@ Upon completion, return one of:
|
||||
|
||||
Before returning, verify:
|
||||
|
||||
- [ ] Schema reference was read and output structure matches schema type (base vs fix)
|
||||
- [ ] Schema info was obtained via json_builder and output structure matches schema type (base vs fix)
|
||||
- [ ] All tasks have valid IDs (TASK-NNN or FIX-NNN format)
|
||||
- [ ] All tasks have 2+ implementation steps
|
||||
- [ ] All convergence criteria are quantified and testable (no vague language)
|
||||
|
||||
@@ -348,7 +348,7 @@ Write({ file_path: filePath, content: newContent })
|
||||
.workflow/issues/solutions/{issue-id}.jsonl
|
||||
```
|
||||
|
||||
Each line is a solution JSON containing tasks. Schema: `cat ~/.ccw/workflows/cli-templates/schemas/solution-schema.json`
|
||||
Each line is a solution JSON containing tasks. Schema: `ccw tool exec json_builder '{"cmd":"info","schema":"solution"}'`
|
||||
|
||||
### 2.2 Return Summary
|
||||
|
||||
@@ -388,7 +388,7 @@ Each line is a solution JSON containing tasks. Schema: `cat ~/.ccw/workflows/cli
|
||||
|
||||
**ALWAYS**:
|
||||
1. **Search Tool Priority**: ACE (`mcp__ace-tool__search_context`) → CCW (`mcp__ccw-tools__smart_search`) / Built-in (`Grep`, `Glob`, `Read`)
|
||||
2. Read schema first: `cat ~/.ccw/workflows/cli-templates/schemas/solution-schema.json`
|
||||
2. Get schema info: `ccw tool exec json_builder '{"cmd":"info","schema":"solution"}'` (replaces reading raw schema)
|
||||
3. Use ACE semantic search as PRIMARY exploration tool
|
||||
4. Fetch issue details via `ccw issue status <id> --json`
|
||||
5. **Analyze failure history**: Check `issue.feedback` for type='failure', stage='execute'
|
||||
@@ -408,6 +408,11 @@ Each line is a solution JSON containing tasks. Schema: `cat ~/.ccw/workflows/cli
|
||||
4. **Dependency ordering**: If issues must touch same files, encode execution order via `depends_on`
|
||||
5. **Scope minimization**: Prefer smaller, focused modifications over broad refactoring
|
||||
|
||||
**VALIDATE**: After writing solution JSONL, validate each solution:
|
||||
```bash
|
||||
ccw tool exec json_builder '{"cmd":"validate","target":".workflow/issues/solutions/<issue-id>.jsonl","schema":"solution"}'
|
||||
```
|
||||
|
||||
**NEVER**:
|
||||
1. Execute implementation (return plan only)
|
||||
2. Use vague criteria ("works correctly", "good performance")
|
||||
|
||||
@@ -129,7 +129,7 @@ Task(
|
||||
Analyze project for workflow initialization and generate .workflow/project-tech.json.
|
||||
|
||||
## MANDATORY FIRST STEPS
|
||||
1. Execute: cat ~/.ccw/workflows/cli-templates/schemas/project-tech-schema.json (get schema reference)
|
||||
1. Execute: ccw tool exec json_builder '{"cmd":"info","schema":"tech"}' (get schema summary)
|
||||
2. Execute: ccw tool exec get_modules_by_depth '{}' (get project structure)
|
||||
|
||||
## Task
|
||||
|
||||
@@ -401,7 +401,7 @@ Task(
|
||||
1. Read review state: ${reviewStateJsonPath}
|
||||
2. Get target files: Read resolved_files from review-state.json
|
||||
3. Validate file access: bash(ls -la ${targetFiles.join(' ')})
|
||||
4. Execute: cat ~/.ccw/workflows/cli-templates/schemas/review-dimension-results-schema.json (get output schema reference)
|
||||
4. Execute: ccw tool exec json_builder '{"cmd":"info","schema":"review-dim"}' (get output schema summary)
|
||||
5. Read: .workflow/project-tech.json (technology stack and architecture context)
|
||||
6. Read: .workflow/specs/*.md (user-defined constraints and conventions to validate against)
|
||||
|
||||
@@ -456,7 +456,7 @@ Task(
|
||||
${getDimensionGuidance(dimension)}
|
||||
|
||||
## Success Criteria
|
||||
- [ ] Schema obtained via cat review-dimension-results-schema.json
|
||||
- [ ] Schema obtained via json_builder info
|
||||
- [ ] All target files analyzed for ${dimension} concerns
|
||||
- [ ] All findings include file:line references with code snippets
|
||||
- [ ] Severity assessment follows established criteria (see reference)
|
||||
@@ -505,7 +505,7 @@ Task(
|
||||
2. Read affected file: ${file}
|
||||
3. Identify related code: bash(grep -r "import.*${basename(file)}" ${projectDir}/src --include="*.ts")
|
||||
4. Read test files: bash(find ${projectDir}/tests -name "*${basename(file, '.ts')}*" -type f)
|
||||
5. Execute: cat ~/.ccw/workflows/cli-templates/schemas/review-deep-dive-results-schema.json (get output schema reference)
|
||||
5. Execute: ccw tool exec json_builder '{"cmd":"info","schema":"review-deep"}' (get output schema summary)
|
||||
6. Read: .workflow/project-tech.json (technology stack and architecture context)
|
||||
7. Read: .workflow/specs/*.md (user-defined constraints for remediation compliance)
|
||||
|
||||
@@ -538,7 +538,7 @@ Task(
|
||||
- Impact assessment and rollback strategy
|
||||
|
||||
## Success Criteria
|
||||
- [ ] Schema obtained via cat review-deep-dive-results-schema.json
|
||||
- [ ] Schema obtained via json_builder info
|
||||
- [ ] Root cause clearly identified with supporting evidence
|
||||
- [ ] Remediation plan is step-by-step actionable with exact file:line references
|
||||
- [ ] Each step includes specific commands and validation tests
|
||||
|
||||
@@ -412,7 +412,7 @@ Task(
|
||||
2. Read completed task summaries: bash(find ${summariesDir} -name "IMPL-*.md" -type f)
|
||||
3. Get changed files: bash(cd ${workflowDir} && git log --since="${sessionCreatedAt}" --name-only --pretty=format: | sort -u)
|
||||
4. Read review state: ${reviewStateJsonPath}
|
||||
5. Execute: cat ~/.ccw/workflows/cli-templates/schemas/review-dimension-results-schema.json (get output schema reference)
|
||||
5. Execute: ccw tool exec json_builder '{"cmd":"info","schema":"review-dim"}' (get output schema summary)
|
||||
6. Read: .workflow/project-tech.json (technology stack and architecture context)
|
||||
7. Read: .workflow/specs/*.md (user-defined constraints and conventions to validate against)
|
||||
|
||||
@@ -467,7 +467,7 @@ Task(
|
||||
${getDimensionGuidance(dimension)}
|
||||
|
||||
## Success Criteria
|
||||
- [ ] Schema obtained via cat review-dimension-results-schema.json
|
||||
- [ ] Schema obtained via json_builder info
|
||||
- [ ] All changed files analyzed for ${dimension} concerns
|
||||
- [ ] All findings include file:line references with code snippets
|
||||
- [ ] Severity assessment follows established criteria (see reference)
|
||||
@@ -516,7 +516,7 @@ Task(
|
||||
2. Read affected file: ${file}
|
||||
3. Identify related code: bash(grep -r "import.*${basename(file)}" ${workflowDir}/src --include="*.ts")
|
||||
4. Read test files: bash(find ${workflowDir}/tests -name "*${basename(file, '.ts')}*" -type f)
|
||||
5. Execute: cat ~/.ccw/workflows/cli-templates/schemas/review-deep-dive-results-schema.json (get output schema reference)
|
||||
5. Execute: ccw tool exec json_builder '{"cmd":"info","schema":"review-deep"}' (get output schema summary)
|
||||
6. Read: .workflow/project-tech.json (technology stack and architecture context)
|
||||
7. Read: .workflow/specs/*.md (user-defined constraints for remediation compliance)
|
||||
|
||||
@@ -550,7 +550,7 @@ Task(
|
||||
- Impact assessment and rollback strategy
|
||||
|
||||
## Success Criteria
|
||||
- [ ] Schema obtained via cat review-deep-dive-results-schema.json
|
||||
- [ ] Schema obtained via json_builder info
|
||||
- [ ] Root cause clearly identified with supporting evidence
|
||||
- [ ] Remediation plan is step-by-step actionable with exact file:line references
|
||||
- [ ] Each step includes specific commands and validation tests
|
||||
|
||||
@@ -246,8 +246,8 @@ Agent({
|
||||
description: "Generate implementation plan",
|
||||
prompt: `
|
||||
## Schema Reference
|
||||
Execute: cat ~/.ccw/workflows/cli-templates/schemas/plan-overview-base-schema.json
|
||||
Execute: cat ~/.ccw/workflows/cli-templates/schemas/task-schema.json
|
||||
Execute: ccw tool exec json_builder '{"cmd":"info","schema":"plan"}'
|
||||
Execute: ccw tool exec json_builder '{"cmd":"info","schema":"task"}'
|
||||
|
||||
## Output Format: Two-Layer Structure
|
||||
- plan.json: Overview with task_ids[] referencing .task/ files (NO tasks[] array)
|
||||
|
||||
@@ -70,7 +70,7 @@ Task(subagent_type="cli-execution-agent", run_in_background=false, prompt=`
|
||||
## Analysis Steps
|
||||
|
||||
### 0. Load Output Schema (MANDATORY)
|
||||
Execute: cat ~/.ccw/workflows/cli-templates/schemas/conflict-resolution-schema.json
|
||||
Execute: ccw tool exec json_builder '{"cmd":"info","schema":"conflict"}' (get schema summary)
|
||||
|
||||
### 1. Load Context
|
||||
- Read existing files from conflict_detection.existing_files
|
||||
@@ -119,7 +119,7 @@ Task(subagent_type="cli-execution-agent", run_in_background=false, prompt=`
|
||||
|
||||
Output to conflict-resolution.json (generated in Phase 4)
|
||||
|
||||
**Schema Reference**: Execute cat ~/.ccw/workflows/cli-templates/schemas/conflict-resolution-schema.json to get full schema
|
||||
**Schema Reference**: Execute ccw tool exec json_builder '{"cmd":"info","schema":"conflict"}' to get schema summary
|
||||
|
||||
Return JSON following the schema. Key requirements:
|
||||
- Minimum 2 strategies per conflict, max 4
|
||||
|
||||
@@ -195,8 +195,8 @@ Task(
|
||||
## Plan Verification Task
|
||||
|
||||
### MANDATORY FIRST STEPS
|
||||
1. Read: ~/.ccw/workflows/cli-templates/schemas/plan-verify-agent-schema.json (dimensions & rules)
|
||||
2. Read: ~/.ccw/workflows/cli-templates/schemas/verify-json-schema.json (output schema)
|
||||
1. Execute: ccw tool exec json_builder '{"cmd":"info","schema":"plan-verify"}' (dimensions & rules)
|
||||
2. Execute: ccw tool exec json_builder '{"cmd":"info","schema":"verify"}' (output schema)
|
||||
3. Read: ${session_file} (user intent)
|
||||
4. Read: ${PLANNING_NOTES} (constraints & N+1 context)
|
||||
5. Read: ${IMPL_PLAN} (implementation plan)
|
||||
|
||||
@@ -99,7 +99,7 @@ Execute **${angle}** exploration for TDD task planning context. Analyze codebase
|
||||
## MANDATORY FIRST STEPS (Execute by Agent)
|
||||
1. Run: ccw tool exec get_modules_by_depth '{}' (project structure)
|
||||
2. Run: rg -l "{keyword_from_task}" --type ts (locate relevant files)
|
||||
3. Execute: cat ~/.ccw/workflows/cli-templates/schemas/explore-json-schema.json (get output schema reference)
|
||||
3. Execute: ccw tool exec json_builder '{"cmd":"init","schema":"explore","output":"${sessionFolder}/exploration-${angle}.json"}' (init output + get schema info)
|
||||
|
||||
## Exploration Strategy (${angle} focus)
|
||||
|
||||
@@ -121,7 +121,7 @@ Execute **${angle}** exploration for TDD task planning context. Analyze codebase
|
||||
|
||||
**File**: ${sessionFolder}/exploration-${angle}.json
|
||||
|
||||
**Schema Reference**: Schema obtained in MANDATORY FIRST STEPS step 3, follow schema exactly
|
||||
**Schema Reference**: Skeleton initialized via json_builder in step 3. Use `set` to populate fields, `validate` before returning.
|
||||
|
||||
**Required Fields** (all ${angle} focused):
|
||||
- project_structure: Modules/architecture relevant to ${angle}
|
||||
@@ -141,7 +141,7 @@ Execute **${angle}** exploration for TDD task planning context. Analyze codebase
|
||||
- _metadata.exploration_angle: "${angle}"
|
||||
|
||||
## Success Criteria
|
||||
- [ ] Schema obtained via cat explore-json-schema.json
|
||||
- [ ] Schema initialized via json_builder init
|
||||
- [ ] get_modules_by_depth.sh executed
|
||||
- [ ] At least 3 relevant files identified with ${angle} rationale
|
||||
- [ ] Patterns are actionable (code examples, not generic advice)
|
||||
|
||||
@@ -70,7 +70,7 @@ Task(subagent_type="cli-execution-agent", run_in_background=false, prompt=`
|
||||
## Analysis Steps
|
||||
|
||||
### 0. Load Output Schema (MANDATORY)
|
||||
Execute: cat ~/.ccw/workflows/cli-templates/schemas/conflict-resolution-schema.json
|
||||
Execute: ccw tool exec json_builder '{"cmd":"info","schema":"conflict"}'
|
||||
|
||||
### 1. Load Context
|
||||
- Read existing files from conflict_detection.existing_files
|
||||
@@ -119,7 +119,7 @@ Task(subagent_type="cli-execution-agent", run_in_background=false, prompt=`
|
||||
|
||||
Output to conflict-resolution.json (generated in Phase 4)
|
||||
|
||||
**Schema Reference**: Execute cat ~/.ccw/workflows/cli-templates/schemas/conflict-resolution-schema.json to get full schema
|
||||
**Schema Reference**: Execute ccw tool exec json_builder '{"cmd":"info","schema":"conflict"}' to get schema summary
|
||||
|
||||
Return JSON following the schema. Key requirements:
|
||||
- Minimum 2 strategies per conflict, max 4
|
||||
|
||||
@@ -223,7 +223,7 @@ Execute **${angle}** exploration for task planning context. Analyze codebase fro
|
||||
## MANDATORY FIRST STEPS (Execute by Agent)
|
||||
1. Run: ccw tool exec get_modules_by_depth '{}' (project structure)
|
||||
2. Run: rg -l "{keyword_from_task}" --type ts (locate relevant files)
|
||||
3. Execute: cat ~/.ccw/workflows/cli-templates/schemas/explore-json-schema.json (get output schema reference)
|
||||
3. Execute: ccw tool exec json_builder '{"cmd":"init","schema":"explore","output":"${sessionFolder}/exploration-${angle}.json"}' (init output + get schema info)
|
||||
|
||||
## Exploration Strategy (${angle} focus)
|
||||
|
||||
@@ -265,7 +265,7 @@ Execute **${angle}** exploration for task planning context. Analyze codebase fro
|
||||
- _metadata.exploration_angle: "${angle}"
|
||||
|
||||
## Success Criteria
|
||||
- [ ] Schema obtained via cat explore-json-schema.json
|
||||
- [ ] Schema initialized via json_builder init
|
||||
- [ ] get_modules_by_depth.sh executed
|
||||
- [ ] At least 3 relevant files identified with ${angle} rationale
|
||||
- [ ] Patterns are actionable (code examples, not generic advice)
|
||||
|
||||
@@ -27,6 +27,7 @@ import * as contextCacheMod from './context-cache.js';
|
||||
import * as skillContextLoaderMod from './skill-context-loader.js';
|
||||
import * as askQuestionMod from './ask-question.js';
|
||||
import * as teamMsgMod from './team-msg.js';
|
||||
import * as jsonBuilderMod from './json-builder.js';
|
||||
|
||||
|
||||
// Import legacy JS tools
|
||||
@@ -315,6 +316,7 @@ registerTool(toLegacyTool(contextCacheMod));
|
||||
registerTool(toLegacyTool(skillContextLoaderMod));
|
||||
registerTool(toLegacyTool(askQuestionMod));
|
||||
registerTool(toLegacyTool(teamMsgMod));
|
||||
registerTool(toLegacyTool(jsonBuilderMod));
|
||||
|
||||
// Register legacy JS tools
|
||||
registerTool(uiGeneratePreviewTool);
|
||||
|
||||
836
ccw/src/tools/json-builder.ts
Normal file
836
ccw/src/tools/json-builder.ts
Normal file
@@ -0,0 +1,836 @@
|
||||
/**
|
||||
* JSON Builder Tool - Schema-aware structured JSON construction/validation.
|
||||
*
|
||||
* Commands:
|
||||
* init — Create empty schema-compliant JSON skeleton
|
||||
* set — Set/append fields with instant validation
|
||||
* validate — Full schema + semantic validation
|
||||
* merge — Merge multiple same-schema JSONs
|
||||
* info — Get schema summary (replaces agent reading raw schema)
|
||||
*
|
||||
* Replaces agent hand-writing JSON + self-validation with tool-assisted
|
||||
* incremental build + automatic validation.
|
||||
*/
|
||||
|
||||
import { z } from 'zod';
|
||||
import type { ToolSchema, ToolResult } from '../types/tool.js';
|
||||
import { readFileSync, writeFileSync, existsSync, mkdirSync } from 'fs';
|
||||
import { resolve, dirname } from 'path';
|
||||
import { validatePath } from '../utils/path-validator.js';
|
||||
import {
|
||||
loadSchema,
|
||||
getSchemaInfo,
|
||||
listSchemas,
|
||||
type JsonSchema,
|
||||
type JsonSchemaProperty,
|
||||
} from './schema-registry.js';
|
||||
|
||||
// ─── Params ──────────────────────────────────────────────────
|
||||
|
||||
/**
 * One `set` operation: a dot/bracket path into the target JSON plus the
 * value to write there. `path` supports "field.sub" nesting and "arr[+]"
 * append syntax (see the `ops` description in the tool schema below).
 */
const OpSchema = z.object({
  path: z.string().min(1), // non-empty path expression, e.g. "relevant_files[+]"
  value: z.unknown(),      // validated later against the target's JSON schema, not here
});
|
||||
|
||||
/**
 * Runtime validation for tool invocation params. Only `cmd` is required;
 * per-command requirements (e.g. init needs schema + output) are enforced
 * inside each cmd* handler rather than here.
 */
const ParamsSchema = z.object({
  cmd: z.enum(['init', 'set', 'validate', 'merge', 'info']), // which sub-command to run
  schema: z.string().optional(),            // schema ID from schema-registry (e.g. "explore")
  target: z.string().optional(),            // existing JSON file to mutate/validate
  output: z.string().optional(),            // destination path for init/merge
  ops: z.array(OpSchema).optional(),        // set: batch of {path, value} operations
  sources: z.array(z.string()).optional(),  // merge: input file paths
  strategy: z.string().optional(),          // merge: strategy name (default handled downstream)
});
|
||||
|
||||
type Params = z.infer<typeof ParamsSchema>;
|
||||
|
||||
// ─── Tool Schema ─────────────────────────────────────────────
|
||||
|
||||
/**
 * Tool manifest surfaced to agents (name + usage text + input shape).
 * NOTE(review): this inputSchema is descriptive only — actual validation
 * is done by the zod `ParamsSchema` above; keep the two in sync.
 */
export const schema: ToolSchema = {
  name: 'json_builder',
  description: `Schema-aware JSON builder with validation. Commands:
init: Create skeleton from schema. Params: schema (string), output (string)
set: Set/append fields. Params: target (string), ops [{path, value}...]
validate: Full validation. Params: target (string), schema? (string)
merge: Merge JSONs. Params: sources (string[]), output (string), strategy? (string)
info: Schema summary. Params: schema (string)`,
  inputSchema: {
    type: 'object',
    properties: {
      cmd: { type: 'string', description: 'Command: init|set|validate|merge|info' },
      schema: { type: 'string', description: 'Schema ID (e.g. explore, task, diagnosis)' },
      target: { type: 'string', description: 'Target JSON file path' },
      output: { type: 'string', description: 'Output file path' },
      ops: {
        type: 'array',
        description: 'Set operations: [{path: "field.sub" or "arr[+]", value: ...}]',
      },
      sources: { type: 'array', description: 'Source files for merge' },
      strategy: { type: 'string', description: 'Merge strategy: dedup_by_path (default)' },
    },
    // Only cmd is universally required; other fields depend on the command.
    required: ['cmd'],
  },
};
|
||||
|
||||
// ─── Handler ─────────────────────────────────────────────────
|
||||
|
||||
export async function handler(params: Record<string, unknown>): Promise<ToolResult> {
|
||||
const parsed = ParamsSchema.safeParse(params);
|
||||
if (!parsed.success) {
|
||||
return { success: false, error: `Invalid params: ${parsed.error.message}` };
|
||||
}
|
||||
|
||||
const p = parsed.data;
|
||||
try {
|
||||
switch (p.cmd) {
|
||||
case 'init': return await cmdInit(p);
|
||||
case 'set': return await cmdSet(p);
|
||||
case 'validate':return await cmdValidate(p);
|
||||
case 'merge': return await cmdMerge(p);
|
||||
case 'info': return cmdInfo(p);
|
||||
default:
|
||||
return { success: false, error: `Unknown command: ${p.cmd}` };
|
||||
}
|
||||
} catch (err) {
|
||||
return { success: false, error: (err as Error).message };
|
||||
}
|
||||
}
|
||||
|
||||
// ─── init ────────────────────────────────────────────────────
|
||||
|
||||
/**
 * `init` command: create a skeleton JSON file for the given schema ID.
 * Requires `schema` and `output`. The skeleton contains only the schema's
 * required fields filled with type-appropriate defaults (see buildSkeleton).
 * Unknown schema IDs or invalid paths throw and are caught by handler().
 */
async function cmdInit(p: Params): Promise<ToolResult> {
  if (!p.schema) return { success: false, error: 'schema is required for init' };
  if (!p.output) return { success: false, error: 'output is required for init' };

  const jsonSchema = loadSchema(p.schema);
  const skeleton = buildSkeleton(jsonSchema);
  // NOTE(review): validatePath presumably normalizes/authorizes the location
  // before writing — confirm in its definition.
  const outputPath = await validatePath(p.output);
  ensureDir(outputPath);
  const content = JSON.stringify(skeleton, null, 2);
  writeFileSync(outputPath, content, 'utf-8');

  // Echo schema metadata so the caller knows which fields still need values.
  const info = getSchemaInfo(p.schema);
  return {
    success: true,
    result: {
      path: outputPath,
      schema: p.schema,
      requiredFields: info.requiredFields,
      arrayFields: info.arrayFields,
      message: `Initialized ${p.schema} skeleton (${info.requiredFields.length} required fields)`,
    },
  };
}
|
||||
|
||||
/**
|
||||
* Build a JSON skeleton from schema — fills required fields with type-appropriate defaults
|
||||
*/
|
||||
function buildSkeleton(schema: JsonSchema): Record<string, unknown> {
|
||||
const result: Record<string, unknown> = {};
|
||||
const props = schema.properties || {};
|
||||
const required = new Set(schema.required || []);
|
||||
|
||||
for (const [name, prop] of Object.entries(props)) {
|
||||
if (name.startsWith('_comment') || name.startsWith('$')) continue;
|
||||
if (name === 'deprecated' || name === 'deprecated_message' || name === 'migration_guide') continue;
|
||||
if (name === '_field_usage_by_producer' || name === '_directory_convention') continue;
|
||||
|
||||
// Only include required fields in skeleton
|
||||
if (!required.has(name)) continue;
|
||||
|
||||
result[name] = getDefaultValue(prop);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function getDefaultValue(prop: JsonSchemaProperty): unknown {
|
||||
if (prop.default !== undefined) return prop.default;
|
||||
|
||||
const type = Array.isArray(prop.type) ? prop.type[0] : prop.type;
|
||||
switch (type) {
|
||||
case 'string': return '';
|
||||
case 'number':
|
||||
case 'integer': return 0;
|
||||
case 'boolean': return false;
|
||||
case 'array': return [];
|
||||
case 'object': {
|
||||
if (!prop.properties) return {};
|
||||
const obj: Record<string, unknown> = {};
|
||||
const reqSet = new Set(prop.required || []);
|
||||
for (const [k, v] of Object.entries(prop.properties)) {
|
||||
if (reqSet.has(k)) {
|
||||
obj[k] = getDefaultValue(v);
|
||||
}
|
||||
}
|
||||
return obj;
|
||||
}
|
||||
default: return null;
|
||||
}
|
||||
}
|
||||
|
||||
// ─── set ─────────────────────────────────────────────────────
|
||||
|
||||
/**
 * `set` command: apply one or more path/value operations to an existing JSON
 * file, with per-op schema validation when the schema can be determined.
 * Partial-failure semantics: the file is written back as long as at least one
 * op succeeded; per-op errors and warnings are reported in the result. Only
 * when EVERY op fails is the command itself a failure (and nothing is written).
 */
async function cmdSet(p: Params): Promise<ToolResult> {
  if (!p.target) return { success: false, error: 'target is required for set' };
  if (!p.ops || p.ops.length === 0) return { success: false, error: 'ops is required for set' };

  const targetPath = await validatePath(p.target);
  if (!existsSync(targetPath)) {
    return { success: false, error: `Target file not found: ${targetPath}` };
  }

  // NOTE(review): JSON.parse may throw on a corrupt file; handler() converts
  // that into a failed result.
  const raw = readFileSync(targetPath, 'utf-8');
  const doc = JSON.parse(raw) as Record<string, unknown>;

  // Detect schema from doc._metadata?.source or from file name
  const schemaId = p.schema || detectSchema(doc, targetPath);

  const errors: string[] = [];
  const warnings: string[] = [];
  let applied = 0;

  for (const op of p.ops) {
    const result = applyOp(doc, op.path, op.value, schemaId);
    if (result.error) {
      errors.push(`${op.path}: ${result.error}`);
    } else {
      applied++;
      if (result.warnings) warnings.push(...result.warnings);
    }
  }

  if (errors.length > 0 && applied === 0) {
    return { success: false, error: `All ops failed: ${errors.join('; ')}` };
  }

  // Write back
  writeFileSync(targetPath, JSON.stringify(doc, null, 2), 'utf-8');

  return {
    success: true,
    result: { applied, errors, warnings },
  };
}
|
||||
|
||||
/** Outcome of a single set operation: an error message, or optional warnings. */
interface OpResult {
  error?: string;
  warnings?: string[];
}
|
||||
|
||||
/**
 * Apply one set operation to `doc` in place.
 * Supports paths like "field.sub", "arr[+]" (append), "arr[0]" (index) and
 * "arr[?key=val]" (replace the first matching element). Intermediate objects
 * and arrays are auto-created while navigating. When `schemaId` is known the
 * value is validated against the schema before any mutation.
 *
 * NOTE(review): the navigation loop below only advances through 'key' and
 * 'index' segments — an 'append' or 'query' segment in a NON-final position
 * is silently skipped, so e.g. "arr[+].field" will not append an object
 * (it falls through to the final-segment checks and errors). Confirm whether
 * such paths are ever produced by callers.
 */
function applyOp(doc: Record<string, unknown>, path: string, value: unknown, schemaId?: string): OpResult {
  const warnings: string[] = [];

  // Handle "auto" values — only timestamp paths expand; other "auto" values
  // are passed through literally.
  if (value === 'auto') {
    if (path.endsWith('timestamp')) {
      value = new Date().toISOString();
    }
  }

  // Parse path: "field.sub", "arr[+]", "arr[0]", "arr[?key=val]"
  const segments = parsePath(path);
  if (!segments || segments.length === 0) {
    return { error: 'Invalid path syntax' };
  }

  // Validate value against schema if schema is known
  if (schemaId) {
    const validationResult = validateFieldValue(schemaId, path, value);
    if (validationResult.error) return { error: validationResult.error };
    if (validationResult.warnings) warnings.push(...validationResult.warnings);
  }

  // Navigate to parent and set
  let current: unknown = doc;
  for (let i = 0; i < segments.length - 1; i++) {
    const seg = segments[i];
    if (seg.type === 'key') {
      if (typeof current !== 'object' || current === null) {
        return { error: `Cannot navigate into non-object at "${seg.value}"` };
      }
      const obj = current as Record<string, unknown>;
      if (obj[seg.value] === undefined) {
        // Auto-create intermediate objects/arrays — the NEXT segment decides
        // whether a container should be an array (append/index) or an object.
        const nextSeg = segments[i + 1];
        obj[seg.value] = nextSeg.type === 'append' || nextSeg.type === 'index' ? [] : {};
      }
      current = obj[seg.value];
    } else if (seg.type === 'index') {
      if (!Array.isArray(current)) return { error: `Not an array at index ${seg.value}` };
      current = current[Number(seg.value)];
    }
  }

  // Apply final segment
  const last = segments[segments.length - 1];
  if (last.type === 'key') {
    if (typeof current !== 'object' || current === null || Array.isArray(current)) {
      return { error: `Cannot set key "${last.value}" on non-object` };
    }
    (current as Record<string, unknown>)[last.value] = value;
  } else if (last.type === 'append') {
    if (!Array.isArray(current)) {
      return { error: `Cannot append to non-array` };
    }
    current.push(value);
  } else if (last.type === 'index') {
    if (!Array.isArray(current)) {
      return { error: `Cannot index into non-array` };
    }
    current[Number(last.value)] = value;
  } else if (last.type === 'query') {
    if (!Array.isArray(current)) {
      return { error: `Cannot query non-array` };
    }
    // Replace the FIRST element whose `key` property strictly equals `val`.
    const { key, val } = last as QuerySegment;
    const idx = current.findIndex((item: unknown) =>
      typeof item === 'object' && item !== null && (item as Record<string, unknown>)[key] === val
    );
    if (idx === -1) return { error: `No item found where ${key}=${val}` };
    current[idx] = value;
  }

  return { warnings: warnings.length > 0 ? warnings : undefined };
}
|
||||
|
||||
interface KeySegment { type: 'key'; value: string; }
|
||||
interface IndexSegment { type: 'index'; value: string; }
|
||||
interface AppendSegment { type: 'append'; value: string; }
|
||||
interface QuerySegment { type: 'query'; value: string; key: string; val: string; }
|
||||
type PathSegment = KeySegment | IndexSegment | AppendSegment | QuerySegment;
|
||||
|
||||
function parsePath(path: string): PathSegment[] | null {
|
||||
const segments: PathSegment[] = [];
|
||||
// Split by '.' but respect brackets
|
||||
const parts = path.split(/\.(?![^\[]*\])/);
|
||||
|
||||
for (const part of parts) {
|
||||
const bracketMatch = part.match(/^(\w+)\[(.+)\]$/);
|
||||
if (bracketMatch) {
|
||||
const [, field, bracket] = bracketMatch;
|
||||
segments.push({ type: 'key', value: field });
|
||||
|
||||
if (bracket === '+') {
|
||||
segments.push({ type: 'append', value: '+' });
|
||||
} else if (/^\d+$/.test(bracket)) {
|
||||
segments.push({ type: 'index', value: bracket });
|
||||
} else if (bracket.includes('=')) {
|
||||
const [key, val] = bracket.split('=', 2);
|
||||
segments.push({ type: 'query', value: bracket, key: key.replace('?', ''), val } as QuerySegment);
|
||||
}
|
||||
} else {
|
||||
segments.push({ type: 'key', value: part });
|
||||
}
|
||||
}
|
||||
|
||||
return segments.length > 0 ? segments : null;
|
||||
}
|
||||
|
||||
// ─── validate ────────────────────────────────────────────────
|
||||
|
||||
/**
 * `validate` command: two-layer validation of an existing JSON file.
 * Layer 1 is structural (JSON-Schema subset: types, enums, lengths, patterns);
 * layer 2 is semantic (schema-family-specific quality checks). The command
 * succeeds even when validation finds problems — the verdict is in
 * result.valid / result.errors / result.warnings.
 */
async function cmdValidate(p: Params): Promise<ToolResult> {
  if (!p.target) return { success: false, error: 'target is required for validate' };

  const targetPath = await validatePath(p.target);
  if (!existsSync(targetPath)) {
    return { success: false, error: `Target file not found: ${targetPath}` };
  }

  const raw = readFileSync(targetPath, 'utf-8');
  let doc: Record<string, unknown>;
  try {
    doc = JSON.parse(raw);
  } catch {
    return { success: false, error: 'Invalid JSON in target file' };
  }

  // Schema may be given explicitly or inferred from metadata/file name.
  const schemaId = p.schema || detectSchema(doc, targetPath);
  if (!schemaId) {
    return { success: false, error: 'Cannot detect schema. Provide schema param.' };
  }

  const jsonSchema = loadSchema(schemaId);
  const errors: string[] = [];
  const warnings: string[] = [];

  // Layer 1: JSON Schema structural validation
  validateObject(doc, jsonSchema, '', errors, warnings);

  // Layer 2: Semantic quality validation
  validateSemantics(doc, schemaId, errors, warnings);

  // Summary stats for the caller (comment keys excluded from field count).
  const stats = {
    fields: Object.keys(doc).filter(k => !k.startsWith('_comment')).length,
    schema: schemaId,
    arrayItems: countArrayItems(doc, jsonSchema),
  };

  return {
    success: true,
    result: {
      valid: errors.length === 0,
      errors,
      warnings,
      stats,
    },
  };
}
|
||||
|
||||
/**
 * Structural validation of one object against a schema node.
 * Checks required fields for presence/non-emptiness, then validates each
 * present field's value. Fields without a schema entry are allowed
 * (additional properties are never an error). Findings are appended to the
 * shared `errors`/`warnings` accumulators; `prefix` builds the dotted path
 * used in messages.
 */
function validateObject(
  obj: Record<string, unknown>,
  schema: JsonSchema | JsonSchemaProperty,
  prefix: string,
  errors: string[],
  warnings: string[],
): void {
  const props = schema.properties || {};
  const required = new Set(schema.required || []);

  // Check required fields
  for (const req of required) {
    const val = obj[req];
    if (val === undefined || val === null) {
      errors.push(`${prefix}${req}: required field missing`);
    } else if (typeof val === 'string' && val === '' && req !== 'error_message') {
      // Empty strings fail required checks, except `error_message` which is
      // legitimately empty on success.
      errors.push(`${prefix}${req}: required field is empty string`);
    } else if (Array.isArray(val) && val.length === 0) {
      // An empty required array is only an error when the schema sets minItems.
      const propSchema = props[req];
      if (propSchema?.minItems && propSchema.minItems > 0) {
        errors.push(`${prefix}${req}: array requires at least ${propSchema.minItems} items`);
      }
    }
  }

  // Validate each field
  for (const [name, value] of Object.entries(obj)) {
    if (name.startsWith('_comment') || name.startsWith('$')) continue;
    const propSchema = props[name];
    if (!propSchema) continue; // allow additional props

    validateValue(value, propSchema, `${prefix}${name}`, errors, warnings);
  }
}
|
||||
|
||||
/**
 * Validate one value against a property schema, recursing into arrays and
 * nested objects. Appends findings to `errors`/`warnings`; `path` is the
 * dotted location used in messages. null/undefined values are skipped here
 * (required-ness is checked by validateObject).
 *
 * Note: minLength/maxLength/minItems/maxItems use truthiness checks, so a
 * bound of 0 in the schema is ignored; minimum/maximum use explicit
 * `!== undefined` and do honor 0. maxItems overflow is a warning, not an error.
 */
function validateValue(
  value: unknown,
  propSchema: JsonSchemaProperty,
  path: string,
  errors: string[],
  warnings: string[],
): void {
  if (value === null || value === undefined) return;

  const expectedType = Array.isArray(propSchema.type) ? propSchema.type : [propSchema.type];

  // Type check
  const actualType = Array.isArray(value) ? 'array' : typeof value;
  if (propSchema.type && !expectedType.includes(actualType) && !expectedType.includes('null')) {
    // integer is typeof 'number'
    if (!(actualType === 'number' && expectedType.includes('integer'))) {
      errors.push(`${path}: expected ${expectedType.join('|')}, got ${actualType}`);
      return;
    }
  }

  // Enum check
  if (propSchema.enum && !propSchema.enum.includes(value as string | number)) {
    errors.push(`${path}: value "${value}" not in enum [${propSchema.enum.join(', ')}]`);
  }

  // Const check
  if (propSchema.const !== undefined && value !== propSchema.const) {
    errors.push(`${path}: expected const "${propSchema.const}", got "${value}"`);
  }

  // String constraints
  if (typeof value === 'string') {
    if (propSchema.minLength && value.length < propSchema.minLength) {
      errors.push(`${path}: string length ${value.length} < minLength ${propSchema.minLength}`);
    }
    if (propSchema.maxLength && value.length > propSchema.maxLength) {
      errors.push(`${path}: string length ${value.length} > maxLength ${propSchema.maxLength}`);
    }
    if (propSchema.pattern) {
      try {
        if (!new RegExp(propSchema.pattern).test(value)) {
          errors.push(`${path}: does not match pattern "${propSchema.pattern}"`);
        }
      } catch { /* skip invalid regex in schema */ }
    }
  }

  // Number constraints
  if (typeof value === 'number') {
    if (propSchema.minimum !== undefined && value < propSchema.minimum) {
      errors.push(`${path}: ${value} < minimum ${propSchema.minimum}`);
    }
    if (propSchema.maximum !== undefined && value > propSchema.maximum) {
      errors.push(`${path}: ${value} > maximum ${propSchema.maximum}`);
    }
  }

  // Array constraints
  if (Array.isArray(value)) {
    if (propSchema.minItems && value.length < propSchema.minItems) {
      errors.push(`${path}: array has ${value.length} items, needs >= ${propSchema.minItems}`);
    }
    if (propSchema.maxItems && value.length > propSchema.maxItems) {
      warnings.push(`${path}: array has ${value.length} items, max recommended ${propSchema.maxItems}`);
    }
    // Validate each item — object items recurse through validateObject so
    // their required fields are checked; other items recurse as plain values.
    if (propSchema.items && typeof propSchema.items === 'object') {
      for (let i = 0; i < value.length; i++) {
        const item = value[i];
        if (propSchema.items.type === 'object' && typeof item === 'object' && item !== null) {
          validateObject(item as Record<string, unknown>, propSchema.items, `${path}[${i}].`, errors, warnings);
        } else {
          validateValue(item, propSchema.items, `${path}[${i}]`, errors, warnings);
        }
      }
    }
  }

  // Object: recurse
  if (typeof value === 'object' && !Array.isArray(value) && value !== null && propSchema.properties) {
    validateObject(value as Record<string, unknown>, propSchema, `${path}.`, errors, warnings);
  }
}
|
||||
|
||||
// ─── Semantic Validation (Layer 2) ───────────────────────────
|
||||
|
||||
function validateSemantics(doc: Record<string, unknown>, schemaId: string, errors: string[], warnings: string[]): void {
|
||||
// explore + diagnosis: file list quality
|
||||
if (schemaId === 'explore') {
|
||||
validateFileList(doc, 'relevant_files', errors, warnings);
|
||||
} else if (schemaId === 'diagnosis') {
|
||||
validateFileList(doc, 'affected_files', errors, warnings);
|
||||
}
|
||||
|
||||
// task: circular dependency check
|
||||
if (schemaId === 'task' || schemaId === 'solution' || schemaId === 'plan' || schemaId === 'plan-legacy') {
|
||||
validateNoCyclicDeps(doc, errors);
|
||||
}
|
||||
}
|
||||
|
||||
const GENERIC_PHRASES = [
|
||||
'related to', 'relevant file', 'relevant to', 'important file',
|
||||
'related file', 'useful for', 'needed for',
|
||||
];
|
||||
|
||||
function validateFileList(doc: Record<string, unknown>, field: string, errors: string[], warnings: string[]): void {
|
||||
const files = doc[field];
|
||||
if (!Array.isArray(files)) return;
|
||||
|
||||
const allManual = files.length > 0 && files.every((f: Record<string, unknown>) => f.discovery_source === 'manual');
|
||||
if (allManual && files.length > 3) {
|
||||
warnings.push(`${field}: all ${files.length} files discovered via "manual" — consider using bash-scan or cli-analysis`);
|
||||
}
|
||||
|
||||
for (let i = 0; i < files.length; i++) {
|
||||
const f = files[i] as Record<string, unknown>;
|
||||
const rationale = (f.rationale as string) || '';
|
||||
const relevance = (f.relevance as number) || 0;
|
||||
|
||||
// Check generic rationale
|
||||
const lower = rationale.toLowerCase();
|
||||
for (const phrase of GENERIC_PHRASES) {
|
||||
if (lower === phrase || (lower.length < 25 && lower.includes(phrase))) {
|
||||
warnings.push(`${field}[${i}].rationale: too generic ("${rationale}") — be more specific`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// High relevance files need key_code and topic_relation
|
||||
if (relevance >= 0.7) {
|
||||
if (!f.key_code || (Array.isArray(f.key_code) && (f.key_code as unknown[]).length === 0)) {
|
||||
warnings.push(`${field}[${i}]: relevance=${relevance} but missing key_code (recommended for >= 0.7)`);
|
||||
}
|
||||
if (!f.topic_relation) {
|
||||
warnings.push(`${field}[${i}]: relevance=${relevance} but missing topic_relation (recommended for >= 0.7)`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function validateNoCyclicDeps(doc: Record<string, unknown>, errors: string[]): void {
|
||||
const tasks = (doc.tasks as Array<Record<string, unknown>>) || [];
|
||||
if (tasks.length === 0) return;
|
||||
|
||||
// Build adjacency
|
||||
const deps = new Map<string, string[]>();
|
||||
for (const t of tasks) {
|
||||
const id = t.id as string;
|
||||
if (!id) continue;
|
||||
deps.set(id, (t.depends_on as string[]) || []);
|
||||
}
|
||||
|
||||
// DFS cycle check
|
||||
const visited = new Set<string>();
|
||||
const stack = new Set<string>();
|
||||
|
||||
function hasCycle(node: string): boolean {
|
||||
if (stack.has(node)) return true;
|
||||
if (visited.has(node)) return false;
|
||||
visited.add(node);
|
||||
stack.add(node);
|
||||
for (const dep of deps.get(node) || []) {
|
||||
if (hasCycle(dep)) return true;
|
||||
}
|
||||
stack.delete(node);
|
||||
return false;
|
||||
}
|
||||
|
||||
for (const id of deps.keys()) {
|
||||
if (hasCycle(id)) {
|
||||
errors.push(`tasks: circular dependency detected involving "${id}"`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function countArrayItems(doc: Record<string, unknown>, schema: JsonSchema): Record<string, number> {
|
||||
const counts: Record<string, number> = {};
|
||||
for (const [name, value] of Object.entries(doc)) {
|
||||
if (Array.isArray(value)) {
|
||||
counts[name] = value.length;
|
||||
}
|
||||
}
|
||||
return counts;
|
||||
}
|
||||
|
||||
// ─── Field-level validation (for set) ────────────────────────
|
||||
|
||||
/** Result of validating a single field value: an error, or optional warnings. */
interface FieldValidation {
  error?: string;
  warnings?: string[];
}
|
||||
|
||||
/**
 * Validate one value against the schema property addressed by `fieldPath`,
 * used by the `set` command for instant per-op feedback. Lenient by design:
 * an unknown schema ID or an unresolvable field path yields an empty result
 * (the op is allowed). Array-element ops ([+]/[index]) validate the element
 * against the array's `items` schema; plain paths validate the value directly.
 */
function validateFieldValue(schemaId: string, fieldPath: string, value: unknown): FieldValidation {
  const warnings: string[] = [];
  let jsonSchema: JsonSchema;
  try {
    jsonSchema = loadSchema(schemaId);
  } catch {
    return {}; // Skip validation if schema not found
  }

  // Resolve the property schema for this path
  const propSchema = resolvePropertySchema(jsonSchema, fieldPath);
  if (!propSchema) return {}; // Unknown field, allow it

  // For array appends, validate the item against items schema
  if (fieldPath.includes('[+]') || fieldPath.match(/\[\d+\]/)) {
    const itemSchema = propSchema.items;
    if (itemSchema && typeof value === 'object' && value !== null) {
      const errors: string[] = [];
      // Only object items get structural validation; primitive items pass.
      if (itemSchema.type === 'object') {
        validateObject(value as Record<string, unknown>, itemSchema, '', errors, warnings);
      }
      if (errors.length > 0) return { error: errors.join('; ') };
    }
    return { warnings: warnings.length > 0 ? warnings : undefined };
  }

  // For direct field set, validate the value
  const errors: string[] = [];
  validateValue(value, propSchema, fieldPath, errors, warnings);
  if (errors.length > 0) return { error: errors.join('; ') };
  return { warnings: warnings.length > 0 ? warnings : undefined };
}
|
||||
|
||||
function resolvePropertySchema(schema: JsonSchema, fieldPath: string): JsonSchemaProperty | null {
|
||||
const cleanPath = fieldPath.replace(/\[\+\]|\[\d+\]|\[\?[^\]]+\]/g, '');
|
||||
const parts = cleanPath.split('.');
|
||||
let current: JsonSchemaProperty | undefined = schema as unknown as JsonSchemaProperty;
|
||||
|
||||
for (const part of parts) {
|
||||
if (!part) continue;
|
||||
if (current?.properties?.[part]) {
|
||||
current = current.properties[part];
|
||||
} else if (current?.items?.properties?.[part]) {
|
||||
current = current.items.properties[part];
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
return current || null;
|
||||
}
|
||||
|
||||
// ─── merge ───────────────────────────────────────────────────
|
||||
|
||||
/**
 * `merge` command: combine two or more JSON files into `output`.
 * The first source is the base; later sources fill gaps, extend/de-dup
 * arrays, and concatenate differing text fields (see mergeDocuments).
 * Schema detection (from the first source) is best-effort and only guides
 * the string-field handling during the merge.
 */
async function cmdMerge(p: Params): Promise<ToolResult> {
  if (!p.sources || p.sources.length < 2) {
    return { success: false, error: 'merge requires at least 2 sources' };
  }
  if (!p.output) return { success: false, error: 'output is required for merge' };

  const docs: Record<string, unknown>[] = [];
  for (const src of p.sources) {
    const srcPath = await validatePath(src);
    if (!existsSync(srcPath)) {
      return { success: false, error: `Source not found: ${srcPath}` };
    }
    // NOTE(review): JSON.parse may throw on a corrupt source; handler()
    // converts that into a failed result.
    docs.push(JSON.parse(readFileSync(srcPath, 'utf-8')));
  }

  const schemaId = p.schema || detectSchema(docs[0], p.sources[0]);
  const jsonSchema = schemaId ? loadSchema(schemaId) : null;
  const strategy = p.strategy || 'dedup_by_path';

  const merged = mergeDocuments(docs, jsonSchema, strategy);

  const outputPath = await validatePath(p.output);
  ensureDir(outputPath);
  writeFileSync(outputPath, JSON.stringify(merged, null, 2), 'utf-8');

  return {
    success: true,
    result: {
      path: outputPath,
      sourceCount: docs.length,
      strategy,
      message: `Merged ${docs.length} documents`,
    },
  };
}
|
||||
|
||||
/**
 * Merge documents into a deep clone of the first one.
 * Per-key rules applied for each later document (underscore/'$' keys skipped):
 * - array + array: de-duplicate via deduplicateArrays (dedup_by_path strategy)
 *   or plain concatenation otherwise;
 * - string + string (schema says type 'string'): concatenate with a blank
 *   line when both are non-empty and differ, otherwise fill if base is empty;
 * - anything else: only fill "empty" base values (undefined/null/''/0 — note
 *   that a deliberate 0 in the base WILL be overwritten by design).
 * Finally stamps _metadata.timestamp and _metadata.merged_from when present.
 */
function mergeDocuments(
  docs: Record<string, unknown>[],
  schema: JsonSchema | null,
  strategy: string,
): Record<string, unknown> {
  const base = structuredClone(docs[0]);
  const props = schema?.properties || {};

  for (let i = 1; i < docs.length; i++) {
    const other = docs[i];
    for (const [key, value] of Object.entries(other)) {
      if (key.startsWith('_') || key.startsWith('$')) continue;

      const existing = base[key];
      const propSchema = props[key];
      const propType = propSchema?.type;

      if (Array.isArray(existing) && Array.isArray(value)) {
        // Array merge with dedup
        if (strategy === 'dedup_by_path') {
          base[key] = deduplicateArrays(existing, value);
        } else {
          base[key] = [...existing, ...value];
        }
      } else if (typeof existing === 'string' && typeof value === 'string' && propType === 'string') {
        // Text fields: concatenate if both non-empty
        if (existing && value && existing !== value) {
          base[key] = `${existing}\n\n${value}`;
        } else if (!existing && value) {
          base[key] = value;
        }
      } else if (existing === undefined || existing === null || existing === '' || existing === 0) {
        // Fill empty values
        base[key] = value;
      }
    }
  }

  // Update metadata
  if (base._metadata && typeof base._metadata === 'object') {
    (base._metadata as Record<string, unknown>).timestamp = new Date().toISOString();
    (base._metadata as Record<string, unknown>).merged_from = docs.length;
  }

  return base;
}
|
||||
|
||||
function deduplicateArrays(a: unknown[], b: unknown[]): unknown[] {
|
||||
const result = [...a];
|
||||
const existingPaths = new Set(
|
||||
a.filter(item => typeof item === 'object' && item !== null)
|
||||
.map(item => (item as Record<string, unknown>).path as string)
|
||||
.filter(Boolean)
|
||||
);
|
||||
|
||||
for (const item of b) {
|
||||
if (typeof item === 'object' && item !== null) {
|
||||
const path = (item as Record<string, unknown>).path as string;
|
||||
if (path && existingPaths.has(path)) {
|
||||
// Dedup: keep the one with higher relevance
|
||||
const existingIdx = result.findIndex(
|
||||
e => typeof e === 'object' && e !== null && (e as Record<string, unknown>).path === path
|
||||
);
|
||||
if (existingIdx !== -1) {
|
||||
const existingRel = ((result[existingIdx] as Record<string, unknown>).relevance as number) || 0;
|
||||
const newRel = ((item as Record<string, unknown>).relevance as number) || 0;
|
||||
if (newRel > existingRel) {
|
||||
result[existingIdx] = item;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
result.push(item);
|
||||
if (path) existingPaths.add(path);
|
||||
}
|
||||
} else {
|
||||
// Primitive: dedup by value
|
||||
if (!result.includes(item)) {
|
||||
result.push(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// ─── info ────────────────────────────────────────────────────
|
||||
|
||||
/**
 * `info` command: compact schema summary for agents.
 * With `schema` set, returns that schema's full SchemaInfo; without it,
 * returns a one-line summary per registered schema (load failures for
 * individual schemas are reported inline rather than failing the command).
 */
function cmdInfo(p: Params): ToolResult {
  if (!p.schema) {
    // List all schemas
    const schemas = listSchemas();
    const summaries = schemas.map(id => {
      try {
        const info = getSchemaInfo(id);
        return { id, title: info.title, required: info.requiredFields.length, format: info.format };
      } catch {
        return { id, title: '(load error)', required: 0, format: 'json' };
      }
    });
    return { success: true, result: { schemas: summaries } };
  }

  // Unknown schema IDs throw inside getSchemaInfo; handler() reports that.
  const info = getSchemaInfo(p.schema);
  return { success: true, result: info };
}
|
||||
|
||||
// ─── Utilities ───────────────────────────────────────────────
|
||||
|
||||
function ensureDir(filePath: string): void {
|
||||
const dir = dirname(filePath);
|
||||
if (!existsSync(dir)) {
|
||||
mkdirSync(dir, { recursive: true });
|
||||
}
|
||||
}
|
||||
|
||||
function detectSchema(doc: Record<string, unknown>, filePath: string): string | undefined {
|
||||
// Try _metadata.source
|
||||
const meta = doc._metadata as Record<string, unknown> | undefined;
|
||||
if (meta?.source === 'cli-explore-agent') {
|
||||
if (doc.symptom || doc.root_cause) return 'diagnosis';
|
||||
return 'explore';
|
||||
}
|
||||
|
||||
// Try file name patterns
|
||||
const lower = (filePath || '').toLowerCase();
|
||||
if (lower.includes('exploration') || lower.includes('explore')) return 'explore';
|
||||
if (lower.includes('diagnosis') || lower.includes('diagnos')) return 'diagnosis';
|
||||
if (lower.includes('finding') || lower.includes('discovery')) return 'finding';
|
||||
if (lower.includes('fix-plan') || lower.includes('fixplan')) return 'fix-legacy';
|
||||
if (lower.includes('plan')) return 'plan';
|
||||
if (lower.includes('task') || lower.includes('impl-')) return 'task';
|
||||
if (lower.includes('solution')) return 'solution';
|
||||
if (lower.includes('queue')) return 'queue';
|
||||
if (lower.includes('review-dim')) return 'review-dim';
|
||||
if (lower.includes('review-deep')) return 'review-deep';
|
||||
|
||||
return undefined;
|
||||
}
|
||||
201
ccw/src/tools/schema-registry.ts
Normal file
201
ccw/src/tools/schema-registry.ts
Normal file
@@ -0,0 +1,201 @@
|
||||
/**
|
||||
* Schema Registry - Loads and caches JSON schemas from the schemas directory.
|
||||
* Provides schema metadata extraction for json-builder tool.
|
||||
*/
|
||||
|
||||
import { readFileSync, existsSync } from 'fs';
|
||||
import { resolve, dirname } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
/** Registry entry describing one registered schema file. */
export interface SchemaEntry {
  id: string;     // short ID used by tools (e.g. "explore", "task")
  title: string;
  file: string;   // schema file name within the schemas directory
  format: 'json' | 'jsonl' | 'ndjson';
  /** Top-level array field names (for append operations) */
  arrayFields: string[];
}
|
||||
|
||||
/** Compact schema summary (returned by the json_builder `info` command). */
export interface SchemaInfo {
  id: string;
  title: string;
  description: string;
  requiredFields: string[];              // required top-level field names
  optionalFields: string[];              // non-required top-level field names
  arrayFields: string[];                 // top-level array fields (append targets)
  enumFields: Record<string, string[]>;  // field name → allowed enum values
  format: string;                        // "json" | "jsonl" | "ndjson"
}
|
||||
|
||||
/** Minimal JSON-Schema document shape (only the keywords this tool reads). */
interface JsonSchema {
  title?: string;
  description?: string;
  type?: string;
  required?: string[];
  properties?: Record<string, JsonSchemaProperty>;
  items?: JsonSchemaProperty;
  [key: string]: unknown; // tolerate unknown schema keywords
}
|
||||
|
||||
/** Subset of JSON-Schema property keywords supported by the validator. */
interface JsonSchemaProperty {
  type?: string | string[];  // single type, or a union of types
  enum?: (string | number)[];
  const?: unknown;
  required?: string[];
  properties?: Record<string, JsonSchemaProperty>;
  items?: JsonSchemaProperty;
  minLength?: number;
  maxLength?: number;
  minItems?: number;
  maxItems?: number;
  minimum?: number;
  maximum?: number;
  pattern?: string;
  format?: string;
  description?: string;
  default?: unknown;
  oneOf?: JsonSchemaProperty[];
  anyOf?: JsonSchemaProperty[];
  additionalProperties?: boolean | JsonSchemaProperty;
  [key: string]: unknown; // tolerate unknown keywords
}
|
||||
|
||||
// Schema definitions — maps short IDs to schema files.
// `arrayFields` lists top-level array properties (used for append/merge operations);
// `format` distinguishes single-document JSON from line-delimited JSONL/NDJSON.
const SCHEMA_DEFS: Record<string, Omit<SchemaEntry, 'id' | 'title'>> = {
  'explore': { file: 'explore-json-schema.json', arrayFields: ['relevant_files', 'clarification_needs'], format: 'json' },
  'diagnosis': { file: 'diagnosis-json-schema.json', arrayFields: ['affected_files', 'reproduction_steps', 'fix_hints', 'clarification_needs'], format: 'json' },
  'finding': { file: 'discovery-finding-schema.json', arrayFields: ['findings', 'cross_references'], format: 'json' },
  'plan': { file: 'plan-overview-base-schema.json', arrayFields: ['tasks', 'design_decisions', 'focus_paths'], format: 'json' },
  'plan-fix': { file: 'plan-overview-fix-schema.json', arrayFields: ['tasks', 'focus_paths'], format: 'json' },
  'plan-legacy': { file: 'plan-json-schema.json', arrayFields: ['tasks', 'design_decisions', 'focus_paths'], format: 'json' },
  'fix-legacy': { file: 'fix-plan-json-schema.json', arrayFields: ['tasks', 'focus_paths'], format: 'json' },
  'tech': { file: 'project-tech-schema.json', arrayFields: [], format: 'json' },
  'guidelines': { file: 'project-guidelines-schema.json', arrayFields: [], format: 'json' },
  'issue': { file: 'issues-jsonl-schema.json', arrayFields: [], format: 'jsonl' },
  'queue': { file: 'queue-schema.json', arrayFields: ['entries'], format: 'json' },
  'review-dim': { file: 'review-dimension-results-schema.json', arrayFields: ['results'], format: 'json' },
  'review-deep': { file: 'review-deep-dive-results-schema.json', arrayFields: ['results'], format: 'json' },
  'debug-log': { file: 'debug-log-json-schema.json', arrayFields: [], format: 'ndjson' },
  'discussion': { file: 'multi-cli-discussion-schema.json', arrayFields: ['turns'], format: 'json' },
  'task': { file: 'task-schema.json', arrayFields: ['files', 'implementation', 'risks', 'pre_analysis', 'artifacts'], format: 'json' },
  'solution': { file: 'solution-schema.json', arrayFields: ['tasks'], format: 'json' },
  'verify': { file: 'verify-json-schema.json', arrayFields: [], format: 'json' },
  'discovery-state': { file: 'discovery-state-schema.json', arrayFields: [], format: 'json' },
  'conflict': { file: 'conflict-resolution-schema.json', arrayFields: [], format: 'json' },
  'registry': { file: 'registry-schema.json', arrayFields: [], format: 'json' },
  'team-tasks': { file: 'team-tasks-schema.json', arrayFields: [], format: 'json' },
  'plan-verify': { file: 'plan-verify-agent-schema.json', arrayFields: [], format: 'json' },
};
|
||||
// Cache of parsed schema documents, keyed by short ID (lives for the process lifetime).
const schemaCache = new Map<string, JsonSchema>();
|
||||
/**
|
||||
* Resolve the schemas directory path
|
||||
*/
|
||||
function getSchemasDir(): string {
|
||||
// Try environment variable first
|
||||
if (process.env.CCW_HOME) {
|
||||
return resolve(process.env.CCW_HOME, 'workflows', 'cli-templates', 'schemas');
|
||||
}
|
||||
// Try home directory
|
||||
const home = process.env.HOME || process.env.USERPROFILE || '';
|
||||
const ccwDir = resolve(home, '.ccw', 'workflows', 'cli-templates', 'schemas');
|
||||
if (existsSync(ccwDir)) return ccwDir;
|
||||
// Fallback to relative from this file
|
||||
const thisDir = dirname(fileURLToPath(import.meta.url));
|
||||
return resolve(thisDir, '..', '..', '..', '.ccw', 'workflows', 'cli-templates', 'schemas');
|
||||
}
|
||||
|
||||
/**
|
||||
* Load a raw JSON schema by ID
|
||||
*/
|
||||
export function loadSchema(schemaId: string): JsonSchema {
|
||||
const cached = schemaCache.get(schemaId);
|
||||
if (cached) return cached;
|
||||
|
||||
const def = SCHEMA_DEFS[schemaId];
|
||||
if (!def) {
|
||||
throw new Error(`Unknown schema: "${schemaId}". Available: ${Object.keys(SCHEMA_DEFS).join(', ')}`);
|
||||
}
|
||||
|
||||
const schemasDir = getSchemasDir();
|
||||
const filePath = resolve(schemasDir, def.file);
|
||||
if (!existsSync(filePath)) {
|
||||
throw new Error(`Schema file not found: ${filePath}`);
|
||||
}
|
||||
|
||||
const raw = readFileSync(filePath, 'utf-8');
|
||||
const schema = JSON.parse(raw) as JsonSchema;
|
||||
schemaCache.set(schemaId, schema);
|
||||
return schema;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get schema entry metadata (without loading full schema)
|
||||
*/
|
||||
export function getSchemaEntry(schemaId: string): SchemaEntry {
|
||||
const def = SCHEMA_DEFS[schemaId];
|
||||
if (!def) {
|
||||
throw new Error(`Unknown schema: "${schemaId}". Available: ${Object.keys(SCHEMA_DEFS).join(', ')}`);
|
||||
}
|
||||
const schema = loadSchema(schemaId);
|
||||
return { id: schemaId, title: schema.title || schemaId, ...def };
|
||||
}
|
||||
|
||||
/**
|
||||
* Get schema info summary (for agent consumption — replaces reading full schema)
|
||||
*/
|
||||
export function getSchemaInfo(schemaId: string): SchemaInfo {
|
||||
const schema = loadSchema(schemaId);
|
||||
const def = SCHEMA_DEFS[schemaId];
|
||||
const props = schema.properties || {};
|
||||
const required = schema.required || [];
|
||||
const allFields = Object.keys(props).filter(k => !k.startsWith('_comment'));
|
||||
const optional = allFields.filter(f => !required.includes(f));
|
||||
|
||||
const enumFields: Record<string, string[]> = {};
|
||||
for (const [name, prop] of Object.entries(props)) {
|
||||
if (name.startsWith('_comment')) continue;
|
||||
if (prop.enum) {
|
||||
enumFields[name] = prop.enum.map(String);
|
||||
}
|
||||
// Check nested enum in properties
|
||||
if (prop.properties) {
|
||||
for (const [sub, subProp] of Object.entries(prop.properties)) {
|
||||
if (subProp.enum) {
|
||||
enumFields[`${name}.${sub}`] = subProp.enum.map(String);
|
||||
}
|
||||
}
|
||||
}
|
||||
// Check items enum for array fields
|
||||
if (prop.items && typeof prop.items === 'object' && prop.items.properties) {
|
||||
for (const [sub, subProp] of Object.entries(prop.items.properties)) {
|
||||
if (subProp.enum) {
|
||||
enumFields[`${name}[].${sub}`] = subProp.enum.map(String);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
id: schemaId,
|
||||
title: schema.title || schemaId,
|
||||
description: schema.description || '',
|
||||
requiredFields: required,
|
||||
optionalFields: optional,
|
||||
arrayFields: def.arrayFields,
|
||||
enumFields,
|
||||
format: def.format,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* List all available schema IDs
|
||||
*/
|
||||
export function listSchemas(): string[] {
|
||||
return Object.keys(SCHEMA_DEFS);
|
||||
}
|
||||
|
||||
// Re-export the raw schema shapes (type-only) for the validation layer.
export type { JsonSchema, JsonSchemaProperty };
||||
Reference in New Issue
Block a user