refactor: update project structure, replace project.json with project-tech.json, add new architecture and technology analysis

catlog22
2026-01-16 13:33:38 +08:00
parent d515090097
commit e2d56bc08a
8 changed files with 202 additions and 42 deletions

View File

@@ -132,7 +132,7 @@ Scan and analyze workflow session directories:
**Staleness criteria**:
- Active sessions: No modification >7 days + no related git commits
- Archives: >30 days old + no feature references in project.json
- Archives: >30 days old + no feature references in project-tech.json
- Lite-plan: >7 days old + plan.json not executed
- Debug: >3 days old + issue not in recent commits
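A minimal sketch of how these staleness rules could be evaluated, assuming the boolean inputs (related commits, project-tech.json references, plan execution) are computed elsewhere, e.g. from `git log` and a scan of project-tech.json:

```typescript
import { statSync } from 'node:fs';

// Age of a path in days, based on filesystem mtime.
function ageInDays(path: string): number {
  return (Date.now() - statSync(path).mtime.getTime()) / 86_400_000;
}

// Inputs other than the path are assumed to be derived by the surrounding workflow.
interface StaleCheckInput {
  kind: 'active' | 'archive' | 'lite-plan' | 'debug';
  path: string;
  hasRelatedCommits: boolean;        // from recent git history
  referencedInProjectTech: boolean;  // feature entries pointing at this session
  planExecuted: boolean;             // lite-plan only: plan.json was executed
}

function isStale(s: StaleCheckInput): boolean {
  const age = ageInDays(s.path);
  switch (s.kind) {
    case 'active':    return age > 7  && !s.hasRelatedCommits;
    case 'archive':   return age > 30 && !s.referencedInProjectTech;
    case 'lite-plan': return age > 7  && !s.planExecuted;
    case 'debug':     return age > 3  && !s.hasRelatedCommits;
  }
}
```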
@@ -443,8 +443,8 @@ if (selectedCategories.includes('Sessions')) {
}
}
// Update project.json if features referenced deleted sessions
const projectPath = '.workflow/project.json'
// Update project-tech.json if features referenced deleted sessions
const projectPath = '.workflow/project-tech.json'
if (fileExists(projectPath)) {
const project = JSON.parse(Read(projectPath))
const deletedPaths = new Set(results.deleted)
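The hunk cuts off before the actual cleanup logic, so the following is only a hedged sketch of how the rest of it might look; the `session_path` field name linking a feature to its session directory is an assumption, not something shown in this commit:

```typescript
import { existsSync, readFileSync, writeFileSync } from 'node:fs';

// Assumed shape: each feature entry may carry a session_path into .workflow/.
interface FeatureEntry { name: string; session_path?: string; [key: string]: unknown; }

const projectPath = '.workflow/project-tech.json';
const deletedPaths = new Set<string>(['.workflow/archives/WFS-20260101-example']); // stands in for results.deleted

if (existsSync(projectPath)) {
  const project = JSON.parse(readFileSync(projectPath, 'utf8'));
  const features: FeatureEntry[] = project.features ?? [];
  // Drop references to deleted sessions but keep the feature entries themselves.
  for (const feature of features) {
    if (feature.session_path && deletedPaths.has(feature.session_path)) {
      delete feature.session_path;
    }
  }
  writeFileSync(projectPath, JSON.stringify(project, null, 2) + '\n');
}
```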

View File

@@ -108,11 +108,24 @@ Analyze project for workflow initialization and generate .workflow/project-tech.
2. Execute: ccw tool exec get_modules_by_depth '{}' (get project structure)
## Task
Generate complete project-tech.json with:
- project_metadata: {name: ${projectName}, root_path: ${projectRoot}, initialized_at, updated_at}
- technology_analysis: {description, languages, frameworks, build_tools, test_frameworks, architecture, key_components, dependencies}
- development_status: ${regenerate ? 'preserve from backup' : '{completed_features: [], development_index: {feature: [], enhancement: [], bugfix: [], refactor: [], docs: []}, statistics: {total_features: 0, total_sessions: 0, last_updated}}'}
- _metadata: {initialized_by: "cli-explore-agent", analysis_timestamp, analysis_mode}
Generate complete project-tech.json following the schema structure:
- project_name: "${projectName}"
- initialized_at: ISO 8601 timestamp
- overview: {
description: "Brief project description",
technology_stack: {
languages: [{name, file_count, primary}],
frameworks: ["string"],
build_tools: ["string"],
test_frameworks: ["string"]
},
architecture: {style, layers: [], patterns: []},
key_components: [{name, path, description, importance}]
}
- features: []
- development_index: ${regenerate ? 'preserve from backup' : '{feature: [], enhancement: [], bugfix: [], refactor: [], docs: []}'}
- statistics: ${regenerate ? 'preserve from backup' : '{total_features: 0, total_sessions: 0, last_updated: ISO timestamp}'}
- _metadata: {initialized_by: "cli-explore-agent", analysis_timestamp: ISO timestamp, analysis_mode: "deep-scan"}
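For illustration, a minimal project-tech.json shaped to this structure might look like the literal below; all values are placeholders rather than output from a real analysis run:

```typescript
const exampleProjectTech = {
  project_name: "my-project",
  initialized_at: "2026-01-16T05:33:38Z",
  overview: {
    description: "Brief project description",
    technology_stack: {
      languages: [{ name: "TypeScript", file_count: 120, primary: true }],
      frameworks: ["Express"],
      build_tools: ["npm"],
      test_frameworks: ["vitest"]
    },
    architecture: { style: "modular monolith", layers: ["cli", "core", "storage"], patterns: ["command pattern"] },
    key_components: [
      { name: "dashboard", path: "src/dashboard", description: "Aggregates workflow session data", importance: "high" }
    ]
  },
  features: [],
  development_index: { feature: [], enhancement: [], bugfix: [], refactor: [], docs: [] },
  statistics: { total_features: 0, total_sessions: 0, last_updated: "2026-01-16T05:33:38Z" },
  _metadata: {
    initialized_by: "cli-explore-agent",
    analysis_timestamp: "2026-01-16T05:33:38Z",
    analysis_mode: "deep-scan"
  }
};
```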
## Analysis Requirements
@@ -181,16 +194,16 @@ console.log(`
✓ Project initialized successfully
## Project Overview
Name: ${projectTech.project_metadata.name}
Description: ${projectTech.technology_analysis.description}
Name: ${projectTech.project_name}
Description: ${projectTech.overview.description}
### Technology Stack
Languages: ${projectTech.technology_analysis.languages.map(l => l.name).join(', ')}
Frameworks: ${projectTech.technology_analysis.frameworks.join(', ')}
Languages: ${projectTech.overview.technology_stack.languages.map(l => l.name).join(', ')}
Frameworks: ${projectTech.overview.technology_stack.frameworks.join(', ')}
### Architecture
Style: ${projectTech.technology_analysis.architecture.style}
Components: ${projectTech.technology_analysis.key_components.length} core modules
Style: ${projectTech.overview.architecture.style}
Components: ${projectTech.overview.key_components.length} core modules
---
Files created:

View File

@@ -531,11 +531,11 @@ if (hasUnresolvedIssues(reviewResult)) {
**Trigger**: After all executions complete (regardless of code review)
**Skip Condition**: Skip if `.workflow/project.json` does not exist
**Skip Condition**: Skip if `.workflow/project-tech.json` does not exist
**Operations**:
```javascript
const projectJsonPath = '.workflow/project.json'
const projectJsonPath = '.workflow/project-tech.json'
if (!fileExists(projectJsonPath)) return // Silent skip
const projectJson = JSON.parse(Read(projectJsonPath))

View File

@@ -107,13 +107,13 @@ rm -f .workflow/archives/$SESSION_ID/.archiving
Manifest: Updated with N total sessions
```
### Phase 4: Update project.json (Optional)
### Phase 4: Update project-tech.json (Optional)
**Skip if**: `.workflow/project.json` doesn't exist
**Skip if**: `.workflow/project-tech.json` doesn't exist
```bash
# Check
test -f .workflow/project.json || echo "SKIP"
test -f .workflow/project-tech.json || echo "SKIP"
```
**If exists**, add feature entry:
@@ -149,5 +149,5 @@ test -f .workflow/project.json || echo "SKIP"
Phase 1: find session → create .archiving marker
Phase 2: read key files → build manifest entry (no writes)
Phase 3: mkdir → mv → update manifest.json → rm marker
Phase 4: update project.json features array (optional)
Phase 4: update project-tech.json features array (optional)
```
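The feature entry added in Phase 4 is not shown in this hunk; a hedged sketch of what the optional update might do, with the entry fields assumed from the project-tech.json structure rather than taken from this commit:

```typescript
import { existsSync, readFileSync, writeFileSync } from 'node:fs';

const techPath = '.workflow/project-tech.json';

if (existsSync(techPath)) { // Phase 4 is skipped entirely when the file is absent
  const tech = JSON.parse(readFileSync(techPath, 'utf8'));
  tech.features = tech.features ?? [];
  tech.features.push({
    name: 'example-feature',             // placeholder for the archived session's feature name
    session_id: 'WFS-20260116-example',  // hypothetical session ID
    archived_at: new Date().toISOString()
  });
  if (tech.statistics) {
    tech.statistics.total_features = tech.features.length;
    tech.statistics.last_updated = new Date().toISOString();
  }
  writeFileSync(techPath, JSON.stringify(tech, null, 2) + '\n');
}
```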

View File

@@ -16,7 +16,7 @@ examples:
Manages workflow sessions with three operation modes: discovery (manual), auto (intelligent), and force-new.
**Dual Responsibility**:
1. **Project-level initialization** (first-time only): Creates `.workflow/project.json` for feature registry
1. **Project-level initialization** (first-time only): Creates `.workflow/project-tech.json` for feature registry
2. **Session-level initialization** (always): Creates session directory structure
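A minimal sketch of this dual responsibility using Node fs helpers; the stub registry content and the session ID scheme are assumptions, since the command's actual implementation is not part of this diff:

```typescript
import { existsSync, mkdirSync, writeFileSync } from 'node:fs';
import { join } from 'node:path';

const workflowDir = '.workflow';
const techPath = join(workflowDir, 'project-tech.json');

// 1. Project-level initialization: only when project-tech.json does not exist yet.
//    The real file comes from the technical analysis; a stub registry stands in here.
if (!existsSync(techPath)) {
  writeFileSync(techPath, JSON.stringify({ project_name: 'my-project', features: [] }, null, 2));
}

// 2. Session-level initialization: always create a fresh session directory.
const sessionId = `WFS-${new Date().toISOString().slice(0, 10).replace(/-/g, '')}-example`; // hypothetical ID scheme
mkdirSync(join(workflowDir, 'active', sessionId), { recursive: true });
```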
## Session Types

View File

@@ -0,0 +1,141 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Project Guidelines Schema",
"description": "Schema for project-guidelines.json - user-maintained rules and constraints",
"type": "object",
"required": ["conventions", "constraints", "_metadata"],
"properties": {
"conventions": {
"type": "object",
"description": "Coding conventions and standards",
"required": ["coding_style", "naming_patterns", "file_structure", "documentation"],
"properties": {
"coding_style": {
"type": "array",
"items": { "type": "string" },
"description": "Coding style rules (e.g., 'Use strict TypeScript mode', 'Prefer const over let')"
},
"naming_patterns": {
"type": "array",
"items": { "type": "string" },
"description": "Naming conventions (e.g., 'Use camelCase for variables', 'Use PascalCase for components')"
},
"file_structure": {
"type": "array",
"items": { "type": "string" },
"description": "File organization rules (e.g., 'One component per file', 'Tests alongside source files')"
},
"documentation": {
"type": "array",
"items": { "type": "string" },
"description": "Documentation requirements (e.g., 'JSDoc for public APIs', 'README for each module')"
}
}
},
"constraints": {
"type": "object",
"description": "Technical constraints and boundaries",
"required": ["architecture", "tech_stack", "performance", "security"],
"properties": {
"architecture": {
"type": "array",
"items": { "type": "string" },
"description": "Architecture constraints (e.g., 'No circular dependencies', 'Services must be stateless')"
},
"tech_stack": {
"type": "array",
"items": { "type": "string" },
"description": "Technology constraints (e.g., 'No new dependencies without review', 'Use native fetch over axios')"
},
"performance": {
"type": "array",
"items": { "type": "string" },
"description": "Performance requirements (e.g., 'API response < 200ms', 'Bundle size < 500KB')"
},
"security": {
"type": "array",
"items": { "type": "string" },
"description": "Security requirements (e.g., 'Sanitize all user input', 'No secrets in code')"
}
}
},
"quality_rules": {
"type": "array",
"description": "Enforceable quality rules",
"items": {
"type": "object",
"required": ["rule", "scope"],
"properties": {
"rule": {
"type": "string",
"description": "The quality rule statement"
},
"scope": {
"type": "string",
"description": "Where the rule applies (e.g., 'all', 'src/**', 'tests/**')"
},
"enforced_by": {
"type": "string",
"description": "How the rule is enforced (e.g., 'eslint', 'pre-commit', 'code-review')"
}
}
}
},
"learnings": {
"type": "array",
"description": "Project learnings captured from workflow sessions",
"items": {
"type": "object",
"required": ["date", "insight"],
"properties": {
"date": {
"type": "string",
"format": "date",
"description": "Date the learning was captured (YYYY-MM-DD)"
},
"session_id": {
"type": "string",
"description": "WFS session ID where the learning originated"
},
"insight": {
"type": "string",
"description": "The learning or insight captured"
},
"context": {
"type": "string",
"description": "Additional context about when/why this learning applies"
},
"category": {
"type": "string",
"enum": ["architecture", "performance", "security", "testing", "workflow", "other"],
"description": "Category of the learning"
}
}
}
},
"_metadata": {
"type": "object",
"required": ["created_at", "version"],
"properties": {
"created_at": {
"type": "string",
"format": "date-time",
"description": "ISO 8601 timestamp of creation"
},
"version": {
"type": "string",
"description": "Schema version (e.g., '1.0.0')"
},
"last_updated": {
"type": "string",
"format": "date-time",
"description": "ISO 8601 timestamp of last update"
},
"updated_by": {
"type": "string",
"description": "Who/what last updated the file (e.g., 'user', 'workflow:session:solidify')"
}
}
}
}
}
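For reference, a minimal document satisfying this schema might look like the following (written as a TypeScript literal; entries reuse the schema's own example descriptions and are placeholders, not real project rules):

```typescript
const exampleGuidelines = {
  conventions: {
    coding_style: ["Use strict TypeScript mode", "Prefer const over let"],
    naming_patterns: ["Use camelCase for variables"],
    file_structure: ["Tests alongside source files"],
    documentation: ["JSDoc for public APIs"]
  },
  constraints: {
    architecture: ["No circular dependencies"],
    tech_stack: ["No new dependencies without review"],
    performance: ["Bundle size < 500KB"],
    security: ["No secrets in code"]
  },
  quality_rules: [
    { rule: "All exported functions have unit tests", scope: "src/**", enforced_by: "code-review" }
  ],
  learnings: [
    {
      date: "2026-01-16",
      session_id: "WFS-20260116-example", // hypothetical session ID
      insight: "Dashboard cache invalidation must watch project-tech.json",
      category: "workflow"
    }
  ],
  _metadata: {
    created_at: "2026-01-16T05:33:38Z",
    version: "1.0.0",
    last_updated: "2026-01-16T05:33:38Z",
    updated_by: "user"
  }
};
```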

View File

@@ -1,7 +1,7 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "Project Metadata Schema",
"description": "Workflow initialization metadata for project-level context",
"title": "Project Tech Schema",
"description": "Schema for project-tech.json - auto-generated technical analysis (stack, architecture, components)",
"type": "object",
"required": [
"project_name",

View File

@@ -140,12 +140,25 @@ interface ProjectGuidelines {
};
}
interface Language {
name: string;
file_count: number;
primary: boolean;
}
interface KeyComponent {
name: string;
path: string;
description: string;
importance: 'high' | 'medium' | 'low';
}
interface ProjectOverview {
projectName: string;
description: string;
initializedAt: string | null;
technologyStack: {
languages: string[];
languages: Language[];
frameworks: string[];
build_tools: string[];
test_frameworks: string[];
@@ -155,7 +168,7 @@ interface ProjectOverview {
layers: string[];
patterns: string[];
};
keyComponents: string[];
keyComponents: KeyComponent[];
features: unknown[];
developmentIndex: {
feature: unknown[];
@@ -187,13 +200,12 @@ export async function aggregateData(sessions: ScanSessionsResult, workflowDir: s
// Initialize cache manager
const cache = createDashboardCache(workflowDir);
// Prepare paths to watch for changes (includes both new dual files and legacy)
// Prepare paths to watch for changes
const watchPaths = [
join(workflowDir, 'active'),
join(workflowDir, 'archives'),
join(workflowDir, 'project-tech.json'),
join(workflowDir, 'project-guidelines.json'),
join(workflowDir, 'project.json'), // Legacy support
...sessions.active.map(s => s.path),
...sessions.archived.map(s => s.path)
];
@@ -266,7 +278,7 @@ export async function aggregateData(sessions: ScanSessionsResult, workflowDir: s
console.error('Error scanning lite tasks:', (err as Error).message);
}
// Load project overview from project.json
// Load project overview from project-tech.json
try {
data.projectOverview = loadProjectOverview(workflowDir);
} catch (err) {
@@ -553,31 +565,25 @@ function sortTaskIds(a: string, b: string): number {
/**
* Load project overview from project-tech.json and project-guidelines.json
* Supports dual file structure with backward compatibility for legacy project.json
* @param workflowDir - Path to .workflow directory
* @returns Project overview data or null if not found
*/
function loadProjectOverview(workflowDir: string): ProjectOverview | null {
const techFile = join(workflowDir, 'project-tech.json');
const guidelinesFile = join(workflowDir, 'project-guidelines.json');
const legacyFile = join(workflowDir, 'project.json');
// Check for new dual file structure first, fallback to legacy
const useLegacy = !existsSync(techFile) && existsSync(legacyFile);
const projectFile = useLegacy ? legacyFile : techFile;
if (!existsSync(projectFile)) {
console.log(`Project file not found at: ${projectFile}`);
if (!existsSync(techFile)) {
console.log(`Project file not found at: ${techFile}`);
return null;
}
try {
const fileContent = readFileSync(projectFile, 'utf8');
const fileContent = readFileSync(techFile, 'utf8');
const projectData = JSON.parse(fileContent) as Record<string, unknown>;
console.log(`Successfully loaded project overview: ${projectData.project_name || 'Unknown'} (${useLegacy ? 'legacy' : 'tech'})`);
console.log(`Successfully loaded project overview: ${projectData.project_name || 'Unknown'}`);
// Parse tech data (compatible with both legacy and new structure)
// Parse tech data from project-tech.json structure
const overview = projectData.overview as Record<string, unknown> | undefined;
const technologyAnalysis = projectData.technology_analysis as Record<string, unknown> | undefined;
const developmentStatus = projectData.development_status as Record<string, unknown> | undefined;
@@ -645,7 +651,7 @@ function loadProjectOverview(workflowDir: string): ProjectOverview | null {
description: (overview?.description as string) || '',
initializedAt: (projectData.initialized_at as string) || null,
technologyStack: {
languages: extractStringArray(technologyStack?.languages),
languages: (technologyStack?.languages as Language[]) || [],
frameworks: extractStringArray(technologyStack?.frameworks),
build_tools: extractStringArray(technologyStack?.build_tools),
test_frameworks: extractStringArray(technologyStack?.test_frameworks)
@@ -655,7 +661,7 @@ function loadProjectOverview(workflowDir: string): ProjectOverview | null {
layers: extractStringArray(architecture?.layers as unknown[] | undefined),
patterns: extractStringArray(architecture?.patterns as unknown[] | undefined)
},
keyComponents: extractStringArray(overview?.key_components as unknown[] | undefined),
keyComponents: (overview?.key_components as KeyComponent[]) || [],
features: (projectData.features as unknown[]) || [],
developmentIndex: {
feature: (developmentIndex?.feature as unknown[]) || [],
@@ -677,7 +683,7 @@ function loadProjectOverview(workflowDir: string): ProjectOverview | null {
guidelines
};
} catch (err) {
console.error(`Failed to parse project file at ${projectFile}:`, (err as Error).message);
console.error(`Failed to parse project file at ${techFile}:`, (err as Error).message);
console.error('Error stack:', (err as Error).stack);
return null;
}
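A short consumption sketch of the richer overview shape after this change, assuming a ProjectOverview value obtained from aggregateData(...).projectOverview and only the fields shown in the interfaces above (Language, KeyComponent, architecture.layers):

```typescript
// Languages and key components are now structured objects rather than plain strings.
function renderOverviewSummary(overview: ProjectOverview): string {
  const primary = overview.technologyStack.languages
    .filter(l => l.primary)
    .map(l => `${l.name} (${l.file_count} files)`)
    .join(', ');
  const components = overview.keyComponents
    .filter(c => c.importance === 'high')
    .map(c => `${c.name} (${c.path})`)
    .join(', ');
  return [
    `Project: ${overview.projectName}`,
    `Primary languages: ${primary || 'unknown'}`,
    `Layers: ${overview.architecture.layers.join(', ')}`,
    `High-importance components: ${components || 'none recorded'}`
  ].join('\n');
}
```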