Add quality standards and team command design patterns documentation

- Introduced a new quality standards document outlining assessment criteria for team command .md files, including completeness, pattern compliance, integration, and consistency dimensions.
- Established quality gates and issue classification for errors, warnings, and informational notes (see the sketch below).
- Created a comprehensive team command design patterns document detailing infrastructure and collaboration patterns, including message bus integration, YAML front matter requirements, task lifecycle, five-phase execution structure, and error handling.
- Included a pattern selection guide that maps common collaboration scenarios to the appropriate team interaction model.
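
As a rough illustration only (the new documents themselves are not shown in this commit view, so every name and threshold below is an assumption), the issue classification and a quality gate could be modeled along these lines:

```typescript
// Hypothetical sketch -- not the actual schema of the new quality standards document.
type Severity = 'error' | 'warning' | 'info';
type Dimension = 'completeness' | 'pattern-compliance' | 'integration' | 'consistency';

interface QualityFinding {
  severity: Severity;   // errors block, warnings advise, info is optional polish
  dimension: Dimension; // one of the assessment dimensions named above
  file: string;         // the team command .md file under review
  message: string;
}

// One possible quality gate: zero errors and warnings under a threshold.
function passesGate(findings: QualityFinding[], maxWarnings = 3): boolean {
  const errors = findings.filter((f) => f.severity === 'error').length;
  const warnings = findings.filter((f) => f.severity === 'warning').length;
  return errors === 0 && warnings <= maxWarnings;
}
```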
catlog22
2026-02-13 23:39:06 +08:00
parent 5ad7a954d4
commit cdb240d2c2
61 changed files with 5181 additions and 14525 deletions

View File

@@ -3,7 +3,7 @@
// ========================================
// Table component displaying all recent projects with MCP server statistics
-import { useState, useEffect } from 'react';
+import { useState, useEffect, useMemo } from 'react';
import { useIntl } from 'react-intl';
import { Folder, Clock, Database, ExternalLink } from 'lucide-react';
import { Card } from '@/components/ui/Card';
@@ -55,8 +55,11 @@ export function AllProjectsTable({
const { projects, currentProject, isLoading } = useProjectOperations();
// Use provided project paths or default to all projects
-const targetProjectPaths = propProjectPaths ?? projects;
-const displayProjects = maxProjects ? targetProjectPaths.slice(0, maxProjects) : targetProjectPaths;
+// Memoize to stabilize the array reference and prevent useEffect infinite loops
+const displayProjects = useMemo(() => {
+  const target = propProjectPaths ?? projects;
+  return maxProjects ? target.slice(0, maxProjects) : target;
+}, [propProjectPaths, projects, maxProjects]);
// Fetch real project server stats on mount
useEffect(() => {
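
Not part of the diff: a minimal, self-contained sketch of why the stable reference matters. The component, props, and stand-in effect body below are hypothetical, not the real AllProjectsTable code.

```typescript
import { useEffect, useMemo, useState } from 'react';

// Without useMemo, `visible` would be a new array on every render, so any
// effect that lists it as a dependency re-runs each render -- and can loop
// if that effect sets state.
function ServerStatsCount({ paths, max }: { paths: string[]; max?: number }) {
  const [count, setCount] = useState(0);

  // Stable reference: recomputed only when `paths` or `max` actually change.
  const visible = useMemo(() => (max ? paths.slice(0, max) : paths), [paths, max]);

  useEffect(() => {
    // Stand-in for the real per-project stats fetch in AllProjectsTable.
    setCount(visible.length);
  }, [visible]);

  return count;
}
```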

View File

@@ -473,6 +473,8 @@ export interface UseProjectOperationsReturn {
isFetchingServers: boolean;
}
+const EMPTY_PROJECTS: string[] = [];
/**
* Combined hook for project operations (all projects, cross-CLI copy, other projects' servers)
*/
@@ -519,7 +521,7 @@ export function useProjectOperations(): UseProjectOperationsReturn {
};
return {
-projects: projectsQuery.data?.projects ?? [],
+projects: projectsQuery.data?.projects ?? EMPTY_PROJECTS,
currentProject: projectsQuery.data?.currentProject ?? projectPath ?? undefined,
isLoading: projectsQuery.isLoading,
error: projectsQuery.error,
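
A short aside (not part of the diff): `?? []` returns a brand-new array whenever the query has no data, so memoized consumers and effect dependency lists see a "change" on every render; the module-level constant keeps the fallback referentially stable. A sketch with hypothetical helper names:

```typescript
// Illustration of the fallback-reference issue; not code from this commit.
const EMPTY: string[] = [];

const pickInline = (data?: { projects: string[] }) => data?.projects ?? [];    // fresh [] each call
const pickStable = (data?: { projects: string[] }) => data?.projects ?? EMPTY; // same reference each call

console.log(pickInline() === pickInline()); // false -> memo/effect deps see a "change" every render
console.log(pickStable() === pickStable()); // true  -> referentially stable fallback
```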

View File

@@ -70,13 +70,16 @@ interface TaskTest {
integration?: string[]; // Integration test requirements
commands?: string[]; // Test commands to run
coverage_target?: number; // Minimum coverage % (optional)
+manual_checks?: string[]; // Manual verification steps (migrated from acceptance)
+success_metrics?: string[]; // Success metrics (task-schema convention)
}
-interface TaskAcceptance {
-criteria: string[]; // Acceptance criteria (testable)
-verification: string[]; // How to verify each criterion
-manual_checks?: string[]; // Manual verification steps if needed
+interface TaskConvergence {
+criteria: string[]; // Convergence criteria (testable)
+verification?: string | string[]; // How to verify (string or array)
+definition_of_done?: string; // Definition of done (optional)
}
+type TaskAcceptance = TaskConvergence; // Backward compat alias
interface TaskCommit {
type: 'feat' | 'fix' | 'refactor' | 'test' | 'docs' | 'chore';
@@ -91,19 +94,26 @@ interface SolutionTask {
scope: string;
action: string;
description?: string;
+// New fields (preferred)
+files?: { path: string; action?: string; target?: string; change?: string; changes?: string[]; conflict_risk?: string }[];
+convergence?: TaskConvergence;
+// Legacy fields (backward compat read)
modification_points?: { file: string; target: string; change: string }[];
+acceptance?: TaskAcceptance;
// Lifecycle phases (closed-loop)
implementation: string[]; // Implementation steps
test: TaskTest; // Test requirements
regression: string[]; // Regression check points
-acceptance: TaskAcceptance; // Acceptance criteria & verification
commit: TaskCommit; // Commit specification
depends_on: string[];
estimated_minutes?: number;
+effort?: string; // Effort estimate (task-schema: "small"|"medium"|"large"|"xlarge")
status?: string;
-priority?: number;
+priority?: string | number; // String enum or legacy number(1-5)
}
interface Solution {
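
To make the dual-format migration concrete, here is a hedged example (field values invented, trimmed to the fields that matter) of the same task expressed in the new shape and in the legacy shape; the getTaskFiles helper added in the next hunk reads either one:

```typescript
// Hypothetical task payloads (values invented) showing the two shapes the
// dual-read code has to accept; only the relevant fields are filled in.
const newFormatTask = {
  id: 'T1',
  scope: 'cli',
  action: 'update',
  files: [{ path: 'src/commands/issue.ts', change: 'use the new files field' }],
  convergence: {
    criteria: ['files_touched is populated from files[]'],
    verification: 'run the queue formation tests',
  },
};

const legacyFormatTask = {
  id: 'T1',
  scope: 'cli',
  action: 'update',
  modification_points: [
    { file: 'src/commands/issue.ts', target: 'queueAction', change: 'legacy shape' },
  ],
  acceptance: {
    criteria: ['files_touched is populated from modification_points'],
    verification: ['run the queue formation tests'],
  },
};

// getTaskFiles() returns ['src/commands/issue.ts'] for both shapes.
```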
@@ -119,6 +129,17 @@ interface Solution {
bound_at?: string;
}
+/** Extract file paths from a task (dual-read: new `files` field or legacy `modification_points`) */
+function getTaskFiles(task: SolutionTask): string[] {
+  if (task.files && task.files.length > 0) {
+    return task.files.map(f => f.path).filter(Boolean);
+  }
+  if (task.modification_points) {
+    return task.modification_points.map(mp => mp.file).filter(Boolean);
+  }
+  return [];
+}
// Structured failure detail for debugging
interface FailureDetail {
task_id?: string; // Which task failed within the solution
@@ -1290,15 +1311,13 @@ async function solutionAction(issueId: string | undefined, options: IssueOptions
}
}
-// Brief mode: extract files_touched from modification_points
+// Brief mode: extract files_touched from files/modification_points
if (options.brief) {
const briefSolutions = targetSolutions.map(sol => {
const filesTouched = new Set<string>();
for (const task of sol.tasks) {
-if (task.modification_points) {
-  for (const mp of task.modification_points) {
-    if (mp.file) filesTouched.add(mp.file);
-  }
+for (const f of getTaskFiles(task)) {
+  filesTouched.add(f);
}
}
return {
@@ -1374,10 +1393,8 @@ async function solutionsAction(options: IssueOptions): Promise<void> {
if (boundSolution) {
const filesTouched = new Set<string>();
for (const task of boundSolution.tasks) {
-if (task.modification_points) {
-  for (const mp of task.modification_points) {
-    if (mp.file) filesTouched.add(mp.file);
-  }
+for (const f of getTaskFiles(task)) {
+  filesTouched.add(f);
}
}
@@ -2198,8 +2215,8 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
const solution = findSolution(item.issue_id, item.solution_id);
if (solution?.tasks) {
for (const task of solution.tasks) {
-for (const mp of task.modification_points || []) {
-  solutionFiles.push(mp.file);
+for (const f of getTaskFiles(task)) {
+  solutionFiles.push(f);
}
}
}
@@ -2377,8 +2394,8 @@ async function queueAction(subAction: string | undefined, issueId: string | unde
// Collect all files touched by this solution
const filesTouched = new Set<string>();
for (const task of solution.tasks || []) {
-for (const mp of task.modification_points || []) {
-  filesTouched.add(mp.file);
+for (const f of getTaskFiles(task)) {
+  filesTouched.add(f);
}
}

View File

@@ -620,6 +620,19 @@ describe('issue command module', async () => {
describe('Queue Formation', () => {
+function makeSolutionWithFiles(id: string, files: string[], isBound = true): MockSolution {
+  return createMockSolution({
+    id,
+    is_bound: isBound,
+    tasks: [
+      {
+        id: 'T1',
+        files: files.map((file) => ({ path: file, target: 'x', change: 'y' })),
+      },
+    ],
+  });
+}
function makeSolutionWithLegacyFiles(id: string, files: string[], isBound = true): MockSolution {
return createMockSolution({
id,
is_bound: isBound,
@@ -712,7 +725,7 @@ describe('issue command module', async () => {
assert.equal(items[0].solution_id, solutionId);
});
-it('deduplicates files_touched extracted from modification_points', async () => {
+it('deduplicates files_touched extracted from files (new format)', async () => {
issueModule ??= await import(issueCommandUrl);
assert.ok(env);
@@ -735,6 +748,29 @@ describe('issue command module', async () => {
assert.deepEqual(items[0].files_touched?.sort(), ['src/dup.ts', 'src/other.ts']);
});
+it('extracts files_touched from legacy modification_points format', async () => {
+  issueModule ??= await import(issueCommandUrl);
+  assert.ok(env);
+  mock.method(console, 'log', () => {});
+  mock.method(console, 'error', () => {});
+  const issueId = 'ISS-QUEUE-LEGACY';
+  const solutionId = 'SOL-ISS-QUEUE-LEGACY-1';
+  const files = ['src/legacy-a.ts', 'src/legacy-b.ts'];
+  issueModule.writeIssues([createMockIssue({ id: issueId, status: 'planned', bound_solution_id: solutionId })]);
+  issueModule.writeSolutions(issueId, [makeSolutionWithLegacyFiles(solutionId, files, true)]);
+  await issueModule.issueCommand('queue', ['add', issueId], {});
+  const queue = issueModule.readQueue();
+  assert.ok(queue);
+  const items = queue.solutions || [];
+  assert.equal(items.length, 1);
+  assert.deepEqual(items[0].files_touched?.sort(), files.slice().sort());
+});
it('adds multiple issues to the same active queue with incrementing item IDs', async () => {
issueModule ??= await import(issueCommandUrl);
assert.ok(env);