Mirror of https://github.com/catlog22/Claude-Code-Workflow.git (synced 2026-02-14 02:42:04 +08:00)

Compare commits: claude/add...claude/opt (7 commits)

| SHA1 |
|---|
| ca9653c2e6 |
| 751d251433 |
| 51b1eb5da6 |
| 275ed051c6 |
| 5e69748016 |
| 8ae3da8f61 |
| 758321b829 |
@@ -89,7 +89,7 @@ bash(if [ -d .workflow/docs/\${project_name} ]; then find .workflow/docs/\${proj
bash(if [ -d .workflow/docs/\${project_name} ]; then find .workflow/docs/\${project_name} -type f -name "*.md" ! -path "*/README.md" ! -path "*/ARCHITECTURE.md" ! -path "*/EXAMPLES.md" ! -path "*/api/*" 2>/dev/null | xargs cat 2>/dev/null; fi)
```

-**Data Processing**: Parse bash outputs, calculate statistics, use **Write tool** to create `${session_dir}/.process/phase2-analysis.json` with structure:
+**Data Processing**: Parse bash outputs, calculate statistics, use **Write tool** to create `${session_dir}/.process/doc-planning-data.json` with structure:

```json
{
@@ -118,7 +118,7 @@ bash(if [ -d .workflow/docs/\${project_name} ]; then find .workflow/docs/\${proj

**Then** use **Edit tool** to update `workflow-session.json` adding analysis field.

-**Output**: Single `phase2-analysis.json` with all analysis data (no temp files or Python scripts).
+**Output**: Single `doc-planning-data.json` with all analysis data (no temp files or Python scripts).

**Auto-skipped**: Tests (`**/test/**`, `**/*.test.*`), Build (`**/node_modules/**`, `**/dist/**`), Config (root-level files), Vendor directories.

@@ -127,8 +127,8 @@ bash(if [ -d .workflow/docs/\${project_name} ]; then find .workflow/docs/\${proj
**Commands**:

```bash
-# Count existing docs from phase2-analysis.json
-bash(cat .workflow/active/WFS-docs-{timestamp}/.process/phase2-analysis.json | jq '.existing_docs.file_list | length')
+# Count existing docs from doc-planning-data.json
+bash(cat .workflow/active/WFS-docs-{timestamp}/.process/doc-planning-data.json | jq '.existing_docs.file_list | length')
```

**Data Processing**: Use count result, then use **Edit tool** to update `workflow-session.json`:
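For reference, a minimal sketch of that count step run against hypothetical data (the temp-file path and its contents below are illustrative, not from the repo):

```bash
# Illustrative only: create a sample doc-planning-data.json and count its docs.
cat > /tmp/doc-planning-data.json <<'EOF'
{"existing_docs": {"file_list": ["src/auth/README.md", "src/auth/API.md", "src/ui/README.md"]}}
EOF

# Same jq filter as the command above; prints 3 for this sample.
jq '.existing_docs.file_list | length' /tmp/doc-planning-data.json
```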
@@ -182,8 +182,8 @@ Large Projects (single dir >10 docs):
**Commands**:

```bash
-# 1. Get top-level directories from phase2-analysis.json
-bash(cat .workflow/active/WFS-docs-{timestamp}/.process/phase2-analysis.json | jq -r '.top_level_dirs[]')
+# 1. Get top-level directories from doc-planning-data.json
+bash(cat .workflow/active/WFS-docs-{timestamp}/.process/doc-planning-data.json | jq -r '.top_level_dirs[]')

# 2. Get mode from workflow-session.json
bash(cat .workflow/active/WFS-docs-{timestamp}/workflow-session.json | jq -r '.mode // "full"')
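A small aside on the `.mode // "full"` expression: jq's `//` operator substitutes the default when the key is missing or null. A hedged sketch (the second mode value is invented for illustration):

```bash
# jq's // operator: use the value of .mode if present and non-null, else "full".
echo '{}'                      | jq -r '.mode // "full"'   # -> full
echo '{"mode": "readme-only"}' | jq -r '.mode // "full"'   # -> readme-only
```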
@@ -201,7 +201,7 @@ bash(grep -r "router\.|@Get\|@Post" src/ 2>/dev/null && echo "API_FOUND" || echo
- If total ≤10 docs: create group
- If total >10 docs: split to 1 dir/group or subdivide
- If single dir >10 docs: split by subdirectories
-3. Use **Edit tool** to update `phase2-analysis.json` adding groups field:
+3. Use **Edit tool** to update `doc-planning-data.json` adding groups field:
```json
"groups": {
"count": 3,
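If the same `groups` update were scripted rather than applied with the Edit tool, a jq merge along these lines would produce the shape that the later `select` queries expect; the concrete group IDs and directories are invented, and the Edit tool remains the documented path:

```bash
# Hypothetical alternative: merge a groups field into doc-planning-data.json with jq.
# The assignments shape (group_id/directories) mirrors the queries used later in this diff.
tmp=$(mktemp)
jq '.groups = {
      count: 2,
      assignments: [
        {group_id: "1", directories: ["src/auth", "src/api"]},
        {group_id: "2", directories: ["src/ui"]}
      ]
    }' doc-planning-data.json > "$tmp" && mv "$tmp" doc-planning-data.json
```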
@@ -215,7 +215,7 @@ bash(grep -r "router\.|@Get\|@Post" src/ 2>/dev/null && echo "API_FOUND" || echo

**Task ID Calculation**:
```bash
-group_count=$(jq '.groups.count' .workflow/active/WFS-docs-{timestamp}/.process/phase2-analysis.json)
+group_count=$(jq '.groups.count' .workflow/active/WFS-docs-{timestamp}/.process/doc-planning-data.json)
readme_id=$((group_count + 1))  # Next ID after groups
arch_id=$((group_count + 2))
api_id=$((group_count + 3))
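A worked example of the ID arithmetic, assuming `group_count` is 3 (the zero-padded file-name format is an assumption based on the IMPL-001/IMPL-00N names used elsewhere in this diff):

```bash
# Worked example with group_count fixed at 3: follow-up task IDs continue after the group tasks.
group_count=3
readme_id=$((group_count + 1))   # 4
arch_id=$((group_count + 2))     # 5
api_id=$((group_count + 3))      # 6

# Assumed zero-padded naming, matching the IMPL-001 / IMPL-00N pattern.
printf 'IMPL-%03d.json\n' "$readme_id" "$arch_id" "$api_id"
# -> IMPL-004.json, IMPL-005.json, IMPL-006.json
```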
@@ -237,7 +237,7 @@ api_id=$((group_count + 3))

**Generation Process**:
1. Read configuration values (tool, cli_execute, mode) from workflow-session.json
-2. Read group assignments from phase2-analysis.json
+2. Read group assignments from doc-planning-data.json
3. Generate Level 1 tasks (IMPL-001 to IMPL-N, one per group)
4. Generate Level 2+ tasks if mode=full (README, ARCHITECTURE, HTTP API)

@@ -262,14 +262,14 @@ api_id=$((group_count + 3))
},
"context": {
"requirements": [
-"Process directories from group ${group_number} in phase2-analysis.json",
+"Process directories from group ${group_number} in doc-planning-data.json",
"Generate docs to .workflow/docs/${project_name}/ (mirrored structure)",
"Code folders: API.md + README.md; Navigation folders: README.md only",
"Use pre-analyzed data from Phase 2 (no redundant analysis)"
],
"focus_paths": ["${group_dirs_from_json}"],
"precomputed_data": {
-"phase2_analysis": "${session_dir}/.process/phase2-analysis.json"
+"phase2_analysis": "${session_dir}/.process/doc-planning-data.json"
}
},
"flow_control": {
@@ -278,8 +278,8 @@ api_id=$((group_count + 3))
"step": "load_precomputed_data",
"action": "Load Phase 2 analysis and extract group directories",
"commands": [
-"bash(cat ${session_dir}/.process/phase2-analysis.json)",
-"bash(jq '.groups.assignments[] | select(.group_id == \"${group_number}\") | .directories' ${session_dir}/.process/phase2-analysis.json)"
+"bash(cat ${session_dir}/.process/doc-planning-data.json)",
+"bash(jq '.groups.assignments[] | select(.group_id == \"${group_number}\") | .directories' ${session_dir}/.process/doc-planning-data.json)"
],
"output_to": "phase2_context",
"note": "Single JSON file contains all Phase 2 analysis results"
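To see what that `select` filter yields, here is a hedged sketch against sample data shaped like the assignments the command expects (paths and group IDs are invented):

```bash
# Invented sample matching the assignments shape queried above.
cat > /tmp/doc-planning-data.json <<'EOF'
{"groups": {"assignments": [
  {"group_id": "1", "directories": ["src/auth", "src/api"]},
  {"group_id": "2", "directories": ["src/ui"]}
]}}
EOF

group_number=1
# Passing the group as a string via --arg mirrors the quoted "${group_number}" comparison.
jq --arg g "$group_number" \
   '.groups.assignments[] | select(.group_id == $g) | .directories' \
   /tmp/doc-planning-data.json
# -> ["src/auth", "src/api"]
```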
@@ -324,7 +324,7 @@ api_id=$((group_count + 3))
{
"step": 2,
"title": "Batch generate documentation via CLI",
-"command": "bash(dirs=$(jq -r '.groups.assignments[] | select(.group_id == \"${group_number}\") | .directories[]' ${session_dir}/.process/phase2-analysis.json); for dir in $dirs; do cd \"$dir\" && gemini --approval-mode yolo -p \"PURPOSE: Generate module docs\\nTASK: Create documentation\\nMODE: write\\nCONTEXT: @**/* [phase2_context]\\nEXPECTED: API.md and README.md\\nRULES: Mirror structure\" || echo \"Failed: $dir\"; cd -; done)",
+"command": "bash(dirs=$(jq -r '.groups.assignments[] | select(.group_id == \"${group_number}\") | .directories[]' ${session_dir}/.process/doc-planning-data.json); for dir in $dirs; do cd \"$dir\" && gemini --approval-mode yolo -p \"PURPOSE: Generate module docs\\nTASK: Create documentation\\nMODE: write\\nCONTEXT: @**/* [phase2_context]\\nEXPECTED: API.md and README.md\\nRULES: Mirror structure\" || echo \"Failed: $dir\"; cd -; done)",
"depends_on": [1],
"output": "generated_docs"
}
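The command above relies on `cd -` to return after each directory; an alternative sketch using a subshell keeps the caller's working directory untouched even when a generation step fails (the group ID and file name are placeholders, and the gemini flags are copied from the task spec rather than verified):

```bash
# Sketch: same batch loop, but each directory is entered in a subshell so the
# caller's working directory is never changed. "1" and doc-planning-data.json
# stand in for the templated ${group_number} and ${session_dir} values.
dirs=$(jq -r '.groups.assignments[] | select(.group_id == "1") | .directories[]' doc-planning-data.json)
for dir in $dirs; do
  (
    cd "$dir" || exit 1
    gemini --approval-mode yolo -p "PURPOSE: Generate module docs ..."  # prompt abbreviated; flags as given in the task
  ) || echo "Failed: $dir"
done
```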
@@ -464,7 +464,7 @@ api_id=$((group_count + 3))
├── IMPL_PLAN.md
├── TODO_LIST.md
├── .process/
-│   └── phase2-analysis.json # All Phase 2 analysis data (replaces 7+ files)
+│   └── doc-planning-data.json # All Phase 2 analysis data (replaces 7+ files)
└── .task/
├── IMPL-001.json # Small: all modules | Large: group 1
├── IMPL-00N.json # (Large only: groups 2-N)
@@ -473,7 +473,7 @@ api_id=$((group_count + 3))
└── IMPL-{N+3}.json # HTTP API (optional)
```

-**phase2-analysis.json Structure**:
+**doc-planning-data.json Structure**:
```json
{
"metadata": {
@@ -89,7 +89,7 @@ bash(if [ -d .workflow/docs/\${project_name} ]; then find .workflow/docs/\${proj
bash(if [ -d .workflow/docs/\${project_name} ]; then find .workflow/docs/\${project_name} -type f -name "*.md" ! -path "*/README.md" ! -path "*/ARCHITECTURE.md" ! -path "*/EXAMPLES.md" ! -path "*/api/*" 2>/dev/null | xargs cat 2>/dev/null; fi)
```

-**Data Processing**: Parse bash outputs, calculate statistics, use **Write tool** to create `${session_dir}/.process/phase2-analysis.json` with structure:
+**Data Processing**: Parse bash outputs, calculate statistics, use **Write tool** to create `${session_dir}/.process/doc-planning-data.json` with structure:

```json
{
@@ -118,7 +118,7 @@ bash(if [ -d .workflow/docs/\${project_name} ]; then find .workflow/docs/\${proj

**Then** use **Edit tool** to update `workflow-session.json` adding analysis field.

-**Output**: Single `phase2-analysis.json` with all analysis data (no temp files or Python scripts).
+**Output**: Single `doc-planning-data.json` with all analysis data (no temp files or Python scripts).

**Auto-skipped**: Tests (`**/test/**`, `**/*.test.*`), Build (`**/node_modules/**`, `**/dist/**`), Config (root-level files), Vendor directories.

@@ -127,8 +127,8 @@ bash(if [ -d .workflow/docs/\${project_name} ]; then find .workflow/docs/\${proj
**Commands**:

```bash
-# Count existing docs from phase2-analysis.json
-bash(cat .workflow/WFS-docs-{timestamp}/.process/phase2-analysis.json | jq '.existing_docs.file_list | length')
+# Count existing docs from doc-planning-data.json
+bash(cat .workflow/WFS-docs-{timestamp}/.process/doc-planning-data.json | jq '.existing_docs.file_list | length')
```

**Data Processing**: Use count result, then use **Edit tool** to update `workflow-session.json`:

@@ -182,8 +182,8 @@ Large Projects (single dir >10 docs):
**Commands**:

```bash
-# 1. Get top-level directories from phase2-analysis.json
-bash(cat .workflow/WFS-docs-{timestamp}/.process/phase2-analysis.json | jq -r '.top_level_dirs[]')
+# 1. Get top-level directories from doc-planning-data.json
+bash(cat .workflow/WFS-docs-{timestamp}/.process/doc-planning-data.json | jq -r '.top_level_dirs[]')

# 2. Get mode from workflow-session.json
bash(cat .workflow/WFS-docs-{timestamp}/workflow-session.json | jq -r '.mode // "full"')
@@ -201,7 +201,7 @@ bash(grep -r "router\.|@Get\|@Post" src/ 2>/dev/null && echo "API_FOUND" || echo
- If total ≤10 docs: create group
- If total >10 docs: split to 1 dir/group or subdivide
- If single dir >10 docs: split by subdirectories
-3. Use **Edit tool** to update `phase2-analysis.json` adding groups field:
+3. Use **Edit tool** to update `doc-planning-data.json` adding groups field:
```json
"groups": {
"count": 3,
@@ -215,7 +215,7 @@ bash(grep -r "router\.|@Get\|@Post" src/ 2>/dev/null && echo "API_FOUND" || echo

**Task ID Calculation**:
```bash
-group_count=$(jq '.groups.count' .workflow/WFS-docs-{timestamp}/.process/phase2-analysis.json)
+group_count=$(jq '.groups.count' .workflow/WFS-docs-{timestamp}/.process/doc-planning-data.json)
readme_id=$((group_count + 1))  # Next ID after groups
arch_id=$((group_count + 2))
api_id=$((group_count + 3))
@@ -237,7 +237,7 @@ api_id=$((group_count + 3))

**Generation Process**:
1. Read configuration values (tool, cli_execute, mode) from workflow-session.json
-2. Read group assignments from phase2-analysis.json
+2. Read group assignments from doc-planning-data.json
3. Generate Level 1 tasks (IMPL-001 to IMPL-N, one per group)
4. Generate Level 2+ tasks if mode=full (README, ARCHITECTURE, HTTP API)

@@ -262,14 +262,14 @@ api_id=$((group_count + 3))
},
"context": {
"requirements": [
-"Process directories from group ${group_number} in phase2-analysis.json",
+"Process directories from group ${group_number} in doc-planning-data.json",
"Generate docs to .workflow/docs/${project_name}/ (mirrored structure)",
"Code folders: API.md + README.md; Navigation folders: README.md only",
"Use pre-analyzed data from Phase 2 (no redundant analysis)"
],
"focus_paths": ["${group_dirs_from_json}"],
"precomputed_data": {
-"phase2_analysis": "${session_dir}/.process/phase2-analysis.json"
+"phase2_analysis": "${session_dir}/.process/doc-planning-data.json"
}
},
"flow_control": {
@@ -278,8 +278,8 @@ api_id=$((group_count + 3))
"step": "load_precomputed_data",
"action": "Load Phase 2 analysis and extract group directories",
"commands": [
-"bash(cat ${session_dir}/.process/phase2-analysis.json)",
-"bash(jq '.groups.assignments[] | select(.group_id == \"${group_number}\") | .directories' ${session_dir}/.process/phase2-analysis.json)"
+"bash(cat ${session_dir}/.process/doc-planning-data.json)",
+"bash(jq '.groups.assignments[] | select(.group_id == \"${group_number}\") | .directories' ${session_dir}/.process/doc-planning-data.json)"
],
"output_to": "phase2_context",
"note": "Single JSON file contains all Phase 2 analysis results"
@@ -324,7 +324,7 @@ api_id=$((group_count + 3))
{
"step": 2,
"title": "Batch generate documentation via CLI",
-"command": "bash(dirs=$(jq -r '.groups.assignments[] | select(.group_id == \"${group_number}\") | .directories[]' ${session_dir}/.process/phase2-analysis.json); for dir in $dirs; do cd \"$dir\" && gemini --approval-mode yolo -p \"PURPOSE: Generate module docs\\nTASK: Create documentation\\nMODE: write\\nCONTEXT: @**/* [phase2_context]\\nEXPECTED: API.md and README.md\\nRULES: Mirror structure\" || echo \"Failed: $dir\"; cd -; done)",
+"command": "bash(dirs=$(jq -r '.groups.assignments[] | select(.group_id == \"${group_number}\") | .directories[]' ${session_dir}/.process/doc-planning-data.json); for dir in $dirs; do cd \"$dir\" && gemini --approval-mode yolo -p \"PURPOSE: Generate module docs\\nTASK: Create documentation\\nMODE: write\\nCONTEXT: @**/* [phase2_context]\\nEXPECTED: API.md and README.md\\nRULES: Mirror structure\" || echo \"Failed: $dir\"; cd -; done)",
"depends_on": [1],
"output": "generated_docs"
}
@@ -464,7 +464,7 @@ api_id=$((group_count + 3))
├── IMPL_PLAN.md
├── TODO_LIST.md
├── .process/
-│   └── phase2-analysis.json # All Phase 2 analysis data (replaces 7+ files)
+│   └── doc-planning-data.json # All Phase 2 analysis data (replaces 7+ files)
└── .task/
├── IMPL-001.json # Small: all modules | Large: group 1
├── IMPL-00N.json # (Large only: groups 2-N)
@@ -473,7 +473,7 @@ api_id=$((group_count + 3))
└── IMPL-{N+3}.json # HTTP API (optional)
```

-**phase2-analysis.json Structure**:
+**doc-planning-data.json Structure**:
```json
{
"metadata": {
@@ -13,23 +13,23 @@ flowchart TD
Q1 -->|Don't know| Ideation[💡 Ideation phase<br>Requirements exploration]
Q1 -->|Know| Q2{Know how to build it?}

-Ideation --> BrainIdea[/workflow:brainstorm:auto-parallel<br>Explore product direction and feature positioning]
+Ideation --> BrainIdea[/ /workflow:brainstorm:auto-parallel<br>Explore product direction and feature positioning /]
BrainIdea --> Q2

Q2 -->|Don't know| Design[🏗️ Design exploration phase<br>Architecture option exploration]
Q2 -->|Know| Q3{Need UI design?}

-Design --> BrainDesign[/workflow:brainstorm:auto-parallel<br>Explore technical approach and architecture]
+Design --> BrainDesign[/ /workflow:brainstorm:auto-parallel<br>Explore technical approach and architecture /]
BrainDesign --> Q3

Q3 -->|Yes| UIDesign[🎨 UI design phase]
Q3 -->|No| Q4{Task complexity?}

UIDesign --> Q3a{Have a reference design?}
-Q3a -->|Yes| UIImitate[/workflow:ui-design:imitate-auto<br>--input reference URL]
-Q3a -->|No| UIExplore[/workflow:ui-design:explore-auto<br>--prompt design description]
+Q3a -->|Yes| UIImitate[/ /workflow:ui-design:imitate-auto<br>--input reference URL /]
+Q3a -->|No| UIExplore[/ /workflow:ui-design:explore-auto<br>--prompt design description /]

-UIImitate --> UISync[/workflow:ui-design:design-sync<br>Sync design system]
+UIImitate --> UISync[/ /workflow:ui-design:design-sync<br>Sync design system /]
UIExplore --> UISync
UISync --> Q4

@@ -37,19 +37,19 @@ flowchart TD
Q4 -->|Complex/full| FullPlan[📋 Full planning<br>/workflow:plan]

LitePlan --> Q5{Need code exploration?}
-Q5 -->|Yes| LitePlanE[/workflow:lite-plan -e<br>task description]
-Q5 -->|No| LitePlanNormal[/workflow:lite-plan<br>task description]
+Q5 -->|Yes| LitePlanE[/ /workflow:lite-plan -e<br>task description /]
+Q5 -->|No| LitePlanNormal[/ /workflow:lite-plan<br>task description /]

LitePlanE --> LiteConfirm[Three-point confirmation:<br>1️⃣ Task approval<br>2️⃣ Execution method<br>3️⃣ Code review]
LitePlanNormal --> LiteConfirm

LiteConfirm --> Q6{Choose execution method}
-Q6 -->|Agent| LiteAgent[/workflow:lite-execute<br>with @code-developer]
+Q6 -->|Agent| LiteAgent[/ /workflow:lite-execute<br>with @code-developer /]
Q6 -->|CLI tool| LiteCLI[CLI execution<br>Gemini/Qwen/Codex]
Q6 -->|Plan only| UserImpl[User implements manually]

FullPlan --> PlanVerify{Verify plan quality?}
-PlanVerify -->|Yes| Verify[/workflow:action-plan-verify]
+PlanVerify -->|Yes| Verify[/ /workflow:action-plan-verify /]
PlanVerify -->|No| Execute
Verify --> Q7{Verification passed?}
Q7 -->|No| FixPlan[Fix plan issues]
@@ -63,16 +63,16 @@ flowchart TD
Execute --> TestDecision

TestDecision{Need tests?}
-TestDecision -->|TDD mode| TDD[/workflow:tdd-plan<br>Test-driven development]
-TestDecision -->|Tests afterwards| TestGen[/workflow:test-gen<br>Generate tests]
-TestDecision -->|Tests exist| TestCycle[/workflow:test-cycle-execute<br>Test-and-fix loop]
+TestDecision -->|TDD mode| TDD[/ /workflow:tdd-plan<br>Test-driven development /]
+TestDecision -->|Tests afterwards| TestGen[/ /workflow:test-gen<br>Generate tests /]
+TestDecision -->|Tests exist| TestCycle[/ /workflow:test-cycle-execute<br>Test-and-fix loop /]
TestDecision -->|Not needed| Review

-TDD --> TDDExecute[/workflow:execute<br>Red-Green-Refactor]
-TDDExecute --> TDDVerify[/workflow:tdd-verify<br>Verify TDD compliance]
+TDD --> TDDExecute[/ /workflow:execute<br>Red-Green-Refactor /]
+TDDExecute --> TDDVerify[/ /workflow:tdd-verify<br>Verify TDD compliance /]
TDDVerify --> Review

-TestGen --> TestExecute[/workflow:execute<br>Run test tasks]
+TestGen --> TestExecute[/ /workflow:execute<br>Run test tasks /]
TestExecute --> TestResult{Tests pass?}
TestResult -->|No| TestCycle
TestResult -->|Yes| Review
@@ -83,10 +83,10 @@ flowchart TD

Review[📝 Review phase]
Review --> Q8{Need a specialized review?}
-Q8 -->|Security| SecurityReview[/workflow:review<br>--type security]
-Q8 -->|Architecture| ArchReview[/workflow:review<br>--type architecture]
-Q8 -->|Quality| QualityReview[/workflow:review<br>--type quality]
-Q8 -->|Comprehensive| GeneralReview[/workflow:review<br>comprehensive review]
+Q8 -->|Security| SecurityReview[/ /workflow:review<br>--type security /]
+Q8 -->|Architecture| ArchReview[/ /workflow:review<br>--type architecture /]
+Q8 -->|Quality| QualityReview[/ /workflow:review<br>--type quality /]
+Q8 -->|Comprehensive| GeneralReview[/ /workflow:review<br>comprehensive review /]
Q8 -->|Not needed| Complete

SecurityReview --> Complete