mirror of
https://github.com/cexll/myclaude.git
synced 2026-02-06 02:34:09 +08:00
Compare commits
58 Commits
SHA1: 669b1d82ce, a21c31fd89, 773f133111, 4f5d24531c, cc24d43c8b, 27d4ac8afd, 2e5d12570d, 7c89c40e8f, fa617d1599, 90c630e30e, 25bbbc32a7, d8304bf2b9, 7240e08900, e122d8ff25, 6985a30a6a, dd4c12b8e2, a88315d92d, d1f13b3379, 5d362852ab, 238c7b9a13, 0986fa82ee, a989ce343c, abe0839249, d75c973f32, e7f329940b, 0fc5eaaa2d, 420eb857ff, 661656c587, ed4b088631, 55a574280a, 8f05626075, 4395c5785d, b0d7a09ff2, f7aeaa5c7e, c8f75faf84, b8b06257ff, 369a3319f9, 75f08ab81f, 23282ef460, c7cb28a1da, 0a4982e96d, 17e52d78d2, 55246ce9c4, 890fec81bf, 81f298c2ea, 8ea6d10be5, bdf62d0f1c, 40e2d00d35, 13465b12e5, cf93a0ada9, b81953a1d7, 1d2f28101a, 81e95777a8, 993249acb1, 0d28e70026, 7560ce1976, 683d18e6bb, a7147f692c
@@ -1,209 +1,54 @@
|
||||
{
|
||||
"name": "claude-code-dev-workflows",
|
||||
"$schema": "https://anthropic.com/claude-code/marketplace.schema.json",
|
||||
"name": "myclaude",
|
||||
"version": "5.6.1",
|
||||
"description": "Professional multi-agent development workflows with OmO orchestration, Requirements-Driven and BMAD methodologies",
|
||||
"owner": {
|
||||
"name": "Claude Code Dev Workflows",
|
||||
"email": "contact@example.com",
|
||||
"url": "https://github.com/cexll/myclaude"
|
||||
},
|
||||
"metadata": {
|
||||
"description": "Professional multi-agent development workflows with Requirements-Driven and BMAD methodologies, featuring 16+ specialized agents and 12+ commands",
|
||||
"version": "1.0.0"
|
||||
"name": "cexll",
|
||||
"email": "evanxian9@gmail.com"
|
||||
},
|
||||
"plugins": [
|
||||
{
|
||||
"name": "requirements-driven-development",
|
||||
"source": "./requirements-driven-workflow/",
|
||||
"description": "Streamlined requirements-driven development workflow with 90% quality gates for practical feature implementation",
|
||||
"version": "1.0.0",
|
||||
"author": {
|
||||
"name": "Claude Code Dev Workflows",
|
||||
"url": "https://github.com/cexll/myclaude"
|
||||
},
|
||||
"homepage": "https://github.com/cexll/myclaude",
|
||||
"repository": "https://github.com/cexll/myclaude",
|
||||
"license": "MIT",
|
||||
"keywords": [
|
||||
"requirements",
|
||||
"workflow",
|
||||
"automation",
|
||||
"quality-gates",
|
||||
"feature-development",
|
||||
"agile",
|
||||
"specifications"
|
||||
],
|
||||
"category": "workflows",
|
||||
"strict": false,
|
||||
"commands": [
|
||||
"./commands/requirements-pilot.md"
|
||||
],
|
||||
"agents": [
|
||||
"./agents/requirements-generate.md",
|
||||
"./agents/requirements-code.md",
|
||||
"./agents/requirements-testing.md",
|
||||
"./agents/requirements-review.md"
|
||||
]
|
||||
"name": "omo",
|
||||
"description": "Multi-agent orchestration for code analysis, bug investigation, fix planning, and implementation with intelligent routing to specialized agents",
|
||||
"version": "5.6.1",
|
||||
"source": "./skills/omo",
|
||||
"category": "development"
|
||||
},
|
||||
{
|
||||
"name": "bmad-agile-workflow",
|
||||
"source": "./bmad-agile-workflow/",
|
||||
"name": "dev",
|
||||
"description": "Lightweight development workflow with requirements clarification, parallel codex execution, and mandatory 90% test coverage",
|
||||
"version": "5.6.1",
|
||||
"source": "./dev-workflow",
|
||||
"category": "development"
|
||||
},
|
||||
{
|
||||
"name": "requirements",
|
||||
"description": "Requirements-driven development workflow with quality gates for practical feature implementation",
|
||||
"version": "5.6.1",
|
||||
"source": "./requirements-driven-workflow",
|
||||
"category": "development"
|
||||
},
|
||||
{
|
||||
"name": "bmad",
|
||||
"description": "Full BMAD agile workflow with role-based agents (PO, Architect, SM, Dev, QA) and interactive approval gates",
|
||||
"version": "1.0.0",
|
||||
"author": {
|
||||
"name": "Claude Code Dev Workflows",
|
||||
"url": "https://github.com/cexll/myclaude"
|
||||
},
|
||||
"homepage": "https://github.com/cexll/myclaude",
|
||||
"repository": "https://github.com/cexll/myclaude",
|
||||
"license": "MIT",
|
||||
"keywords": [
|
||||
"bmad",
|
||||
"agile",
|
||||
"scrum",
|
||||
"product-owner",
|
||||
"architect",
|
||||
"developer",
|
||||
"qa",
|
||||
"workflow-orchestration"
|
||||
],
|
||||
"category": "workflows",
|
||||
"strict": false,
|
||||
"commands": [
|
||||
"./commands/bmad-pilot.md"
|
||||
],
|
||||
"agents": [
|
||||
"./agents/bmad-po.md",
|
||||
"./agents/bmad-architect.md",
|
||||
"./agents/bmad-sm.md",
|
||||
"./agents/bmad-dev.md",
|
||||
"./agents/bmad-qa.md",
|
||||
"./agents/bmad-orchestrator.md",
|
||||
"./agents/bmad-review.md"
|
||||
]
|
||||
"version": "5.6.1",
|
||||
"source": "./bmad-agile-workflow",
|
||||
"category": "development"
|
||||
},
|
||||
{
|
||||
"name": "development-essentials",
|
||||
"source": "./development-essentials/",
|
||||
"name": "dev-kit",
|
||||
"description": "Essential development commands for coding, debugging, testing, optimization, and documentation",
|
||||
"version": "1.0.0",
|
||||
"author": {
|
||||
"name": "Claude Code Dev Workflows",
|
||||
"url": "https://github.com/cexll/myclaude"
|
||||
},
|
||||
"homepage": "https://github.com/cexll/myclaude",
|
||||
"repository": "https://github.com/cexll/myclaude",
|
||||
"license": "MIT",
|
||||
"keywords": [
|
||||
"code",
|
||||
"debug",
|
||||
"test",
|
||||
"optimize",
|
||||
"review",
|
||||
"bugfix",
|
||||
"refactor",
|
||||
"documentation"
|
||||
],
|
||||
"category": "essentials",
|
||||
"strict": false,
|
||||
"commands": [
|
||||
"./commands/code.md",
|
||||
"./commands/debug.md",
|
||||
"./commands/test.md",
|
||||
"./commands/optimize.md",
|
||||
"./commands/review.md",
|
||||
"./commands/bugfix.md",
|
||||
"./commands/refactor.md",
|
||||
"./commands/docs.md",
|
||||
"./commands/ask.md",
|
||||
"./commands/think.md"
|
||||
],
|
||||
"agents": [
|
||||
"./agents/code.md",
|
||||
"./agents/bugfix.md",
|
||||
"./agents/bugfix-verify.md",
|
||||
"./agents/optimize.md",
|
||||
"./agents/debug.md"
|
||||
]
|
||||
"version": "5.6.1",
|
||||
"source": "./development-essentials",
|
||||
"category": "productivity"
|
||||
},
|
||||
{
|
||||
"name": "codex-cli",
|
||||
"source": "./skills/codex/",
|
||||
"description": "Execute Codex CLI for code analysis, refactoring, and automated code changes with file references (@syntax) and structured output",
|
||||
"version": "1.0.0",
|
||||
"author": {
|
||||
"name": "Claude Code Dev Workflows",
|
||||
"url": "https://github.com/cexll/myclaude"
|
||||
},
|
||||
"homepage": "https://github.com/cexll/myclaude",
|
||||
"repository": "https://github.com/cexll/myclaude",
|
||||
"license": "MIT",
|
||||
"keywords": [
|
||||
"codex",
|
||||
"code-analysis",
|
||||
"refactoring",
|
||||
"automation",
|
||||
"gpt-5",
|
||||
"ai-coding"
|
||||
],
|
||||
"category": "essentials",
|
||||
"strict": false,
|
||||
"skills": [
|
||||
"./SKILL.md"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "gemini-cli",
|
||||
"source": "./skills/gemini/",
|
||||
"description": "Execute Gemini CLI for AI-powered code analysis and generation with Google's latest Gemini models",
|
||||
"version": "1.0.0",
|
||||
"author": {
|
||||
"name": "Claude Code Dev Workflows",
|
||||
"url": "https://github.com/cexll/myclaude"
|
||||
},
|
||||
"homepage": "https://github.com/cexll/myclaude",
|
||||
"repository": "https://github.com/cexll/myclaude",
|
||||
"license": "MIT",
|
||||
"keywords": [
|
||||
"gemini",
|
||||
"google-ai",
|
||||
"code-analysis",
|
||||
"code-generation",
|
||||
"ai-reasoning"
|
||||
],
|
||||
"category": "essentials",
|
||||
"strict": false,
|
||||
"skills": [
|
||||
"./SKILL.md"
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "dev-workflow",
|
||||
"source": "./dev-workflow/",
|
||||
"description": "Minimal lightweight development workflow with requirements clarification, parallel codex execution, and mandatory 90% test coverage",
|
||||
"version": "1.0.0",
|
||||
"author": {
|
||||
"name": "Claude Code Dev Workflows",
|
||||
"url": "https://github.com/cexll/myclaude"
|
||||
},
|
||||
"homepage": "https://github.com/cexll/myclaude",
|
||||
"repository": "https://github.com/cexll/myclaude",
|
||||
"license": "MIT",
|
||||
"keywords": [
|
||||
"dev",
|
||||
"workflow",
|
||||
"codex",
|
||||
"testing",
|
||||
"coverage",
|
||||
"concurrent",
|
||||
"lightweight"
|
||||
],
|
||||
"category": "workflows",
|
||||
"strict": false,
|
||||
"commands": [
|
||||
"./commands/dev.md"
|
||||
],
|
||||
"agents": [
|
||||
"./agents/dev-plan-generator.md"
|
||||
]
|
||||
"name": "sparv",
|
||||
"description": "Minimal SPARV workflow (Specify→Plan→Act→Review→Vault) with 10-point spec gate, unified journal, 2-action saves, 3-failure protocol, and EHRB risk detection",
|
||||
"version": "1.1.0",
|
||||
"source": "./skills/sparv",
|
||||
"category": "development"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
.github/workflows/release.yml (2 changed lines, vendored)

@@ -74,7 +74,7 @@ jobs:

```
if [ "${{ matrix.goos }}" = "windows" ]; then
  OUTPUT_NAME="${OUTPUT_NAME}.exe"
fi
go build -ldflags="-s -w -X main.version=${VERSION}" -o ${OUTPUT_NAME} .
go build -ldflags="-s -w -X main.version=${VERSION}" -o ${OUTPUT_NAME} ./cmd/codeagent
chmod +x ${OUTPUT_NAME}
echo "artifact_path=codeagent-wrapper/${OUTPUT_NAME}" >> $GITHUB_OUTPUT
```
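The only change in this hunk is the build path (`.` becomes `./cmd/codeagent`); the `-X main.version=${VERSION}` flag stamps the release version into the binary at link time. A minimal sketch of the Go side this relies on (the `version` variable name follows from the flag, while the `-version` CLI flag is an assumption added for illustration):

```go
package main

import (
	"flag"
	"fmt"
)

// version is overridden at build time, e.g.:
//   go build -ldflags="-s -w -X main.version=v5.6.4" ./cmd/codeagent
// It must be a package-level string variable (not a const) for -X to apply.
var version = "dev"

func main() {
	showVersion := flag.Bool("version", false, "print version and exit")
	flag.Parse()
	if *showVersion {
		fmt.Println("codeagent", version)
	}
}
```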
.gitignore (1 changed line, vendored)

@@ -7,3 +7,4 @@

```
__pycache__
.coverage
coverage.out
references
```
CHANGELOG.md (60 changed lines)

@@ -2,6 +2,66 @@

All notable changes to this project will be documented in this file.

## [5.6.4] - 2026-01-15

### 🚀 Features

- add reasoning effort config for codex backend
- default to skip-permissions and bypass-sandbox
- add multi-agent support with yolo mode
- add omo module for multi-agent orchestration
- add intelligent backend selection based on task complexity (#61)
- v5.4.0 structured execution report (#94)
- add millisecond-precision timestamps to all log entries (#91)
- skill-install install script and security scan
- add uninstall scripts with selective module removal

### 🐛 Bug Fixes

- filter codex stderr noise logs
- use config override for codex reasoning effort
- propagate SkipPermissions to parallel tasks (#113)
- add timeout for Windows process termination
- reject dash as workdir parameter (#118)
- add sleep in fake script to prevent CI race condition
- fix gemini env load
- fix omo
- fix codeagent skill TaskOutput
- fix Gemini init event not extracting session_id (#111)
- Windows backend exit: terminate the process tree with taskkill + turn.completed support (#108)
- support model parameter for all backends, auto-inject from settings (#105)
- replace setx with reg add to avoid 1024-char PATH truncation (#101)
- remove log noise for unknown event formats (#96)
- prevent duplicate PATH entries on reinstall (#95)
- Minor issues #12 and #13 - ASCII mode and performance optimization
- correct settings.json filename and bump version to v5.2.8
- allow claude backend to read env from setting.json while preventing recursion (#92)
- comprehensive security and quality improvements for PR #85 & #87 (#90)
- Improve backend termination after message and extend timeout (#86)
- optimize duplicate parsing in Parser + critical bug fixes + PR #86 compatibility (#88)
- filter noisy stderr output from gemini backend (#83)
- fix WSL install.sh formatting issue (#78)
- fix PID confusion in multi-backend parallel logs and remove wrapper format (#74) (#76)

### 🚜 Refactor

- remove sisyphus agent and unused code
- streamline agent documentation and remove sisyphus

### 📚 Documentation

- add OmO workflow to README and fix plugin marketplace structure
- update FAQ for default bypass/skip-permissions behavior
- add FAQ section for common questions
- update troubleshooting with idempotent PATH commands (#95)

### 💼 Other

- add test-cases skill
- add browser skill
- BMAD and Requirements-Driven generate matching documents based on semantics (#82)
- update all readme

## [5.2.4] - 2025-12-16
README.md (157 changed lines)

@@ -7,7 +7,7 @@

[](https://www.gnu.org/licenses/agpl-3.0)
[](https://claude.ai/code)
[](https://github.com/cexll/myclaude)
[](https://github.com/cexll/myclaude)

> AI-powered development automation with multi-backend execution (Codex/Claude/Gemini)

@@ -35,6 +35,41 @@ python3 install.py --install-dir ~/.claude

## Workflows Overview

### 0. OmO Multi-Agent Orchestrator (Recommended for Complex Tasks)

**Intelligent multi-agent orchestration that routes tasks to specialized agents based on risk signals.**

```bash
/omo "analyze and fix this authentication bug"
```

**Agent Hierarchy:**

| Agent | Role | Backend | Model |
|-------|------|---------|-------|
| `oracle` | Technical advisor | Claude | claude-opus-4-5 |
| `librarian` | External research | Claude | claude-sonnet-4-5 |
| `explore` | Codebase search | OpenCode | grok-code |
| `develop` | Code implementation | Codex | gpt-5.2 |
| `frontend-ui-ux-engineer` | UI/UX specialist | Gemini | gemini-3-pro |
| `document-writer` | Documentation | Gemini | gemini-3-flash |

**Routing Signals (Not a Fixed Pipeline):**
- Code location unclear → `explore`
- External library/API → `librarian`
- Risky/multi-file change → `oracle`
- Implementation needed → `develop` / `frontend-ui-ux-engineer`

**Common Recipes:**
- Explain code: `explore`
- Small fix with known location: `develop` directly
- Bug fix, location unknown: `explore → develop`
- Cross-cutting refactor: `explore → oracle → develop`
- External API integration: `explore + librarian → oracle → develop`

**Best For:** Complex bug investigation, multi-file refactoring, architecture decisions
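The signals above are routing heuristics rather than a pipeline. As a rough illustration only (the agent names come from the table; the `Signal` fields and selection logic are assumptions, not the plugin's actual dispatcher):

```go
package main

import "fmt"

// Signal captures what the orchestrator knows about a task before routing.
type Signal struct {
	LocationUnclear bool // relevant code location is not known yet
	ExternalAPI     bool // touches an external library or API
	RiskyChange     bool // multi-file or architecturally risky edit
	NeedsUI         bool // frontend / UI work
}

// route returns the agents to involve, in order, based on the signals.
func route(s Signal) []string {
	var agents []string
	if s.LocationUnclear {
		agents = append(agents, "explore")
	}
	if s.ExternalAPI {
		agents = append(agents, "librarian")
	}
	if s.RiskyChange {
		agents = append(agents, "oracle")
	}
	if s.NeedsUI {
		agents = append(agents, "frontend-ui-ux-engineer")
	} else {
		agents = append(agents, "develop")
	}
	return agents
}

func main() {
	// "Bug fix, location unknown" recipe: explore → develop.
	fmt.Println(route(Signal{LocationUnclear: true}))
}
```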
---

### 1. Dev Workflow (Recommended)

**The primary workflow for most development tasks.**

@@ -160,7 +195,7 @@ Required features:

- `-p` - Prompt input flag
- `-r <session_id>` - Resume sessions

**Security Note:** The wrapper only adds `--dangerously-skip-permissions` for Claude when explicitly enabled (e.g. `--skip-permissions` / `CODEAGENT_SKIP_PERMISSIONS=true`). Keep it disabled unless you understand the risk.
**Security Note:** The wrapper adds `--dangerously-skip-permissions` for Claude by default. Set `CODEAGENT_SKIP_PERMISSIONS=false` to disable it if you need permission prompts.

**Verify Claude CLI is installed:**
```bash
@@ -346,8 +381,10 @@ $Env:PATH = "$HOME\bin;$Env:PATH"
```

```batch
REM cmd.exe - persistent for current user
setx PATH "%USERPROFILE%\bin;%PATH%"
REM cmd.exe - persistent for current user (use the PowerShell method above instead)
REM WARNING: This expands %PATH%, which includes the system PATH, causing duplication
REM Note: Using reg add instead of setx to avoid the 1024-character truncation limit
reg add "HKCU\Environment" /v Path /t REG_EXPAND_SZ /d "%USERPROFILE%\bin;%PATH%" /f
```

---

@@ -371,11 +408,14 @@ setx PATH "%USERPROFILE%\bin;%PATH%"

**Codex wrapper not found:**
```bash
# Check PATH
echo $PATH | grep -q "$HOME/.claude/bin" || echo 'export PATH="$HOME/.claude/bin:$PATH"' >> ~/.zshrc
# The installer auto-adds PATH; check whether it is configured
if [[ ":$PATH:" != *":$HOME/.claude/bin:"* ]]; then
  echo "PATH not configured. Reinstalling..."
  bash install.sh
fi

# Reinstall
bash install.sh
# Or manually add (idempotent command)
[[ ":$PATH:" != *":$HOME/.claude/bin:"* ]] && echo 'export PATH="$HOME/.claude/bin:$PATH"' >> ~/.zshrc
```

**Permission denied:**
@@ -459,9 +499,106 @@ claude -r <session_id> "test"

---

## Documentation
## FAQ (Frequently Asked Questions)

### Core Guides
### Q1: `codeagent-wrapper` execution fails with "Unknown event format"

**Problem:**
```
Unknown event format: {"type":"turn.started"}
Unknown event format: {"type":"assistant", ...}
```

**Solution:**
This is a display issue in the logged event stream and does not affect actual functionality. It will be fixed in the next version; you can ignore these log lines.

**Related Issue:** [#96](https://github.com/cexll/myclaude/issues/96)

---

### Q2: Gemini cannot read files ignored by `.gitignore`

**Problem:**
When using `codeagent-wrapper --backend gemini`, files in directories such as `.claude/` that are ignored by `.gitignore` cannot be read.

**Solution:**
- **Option 1:** Remove `.claude/` from your `.gitignore` file
- **Option 2:** Make sure the files that need to be read are not on the `.gitignore` list

**Related Issue:** [#75](https://github.com/cexll/myclaude/issues/75)

---

### Q3: `/dev` command parallel execution is very slow

**Problem:**
Using the `/dev` command for simple features takes too long (over 30 minutes) with no visibility into task progress.

**Solution:**
1. **Check logs:** Review `C:\Users\User\AppData\Local\Temp\codeagent-wrapper-*.log` to identify bottlenecks
2. **Adjust the backend:**
   - Try faster models such as `gpt-5.1-codex-max`
   - Running in WSL may be significantly faster
3. **Workspace:** Use a single repository instead of a monorepo with multiple sub-projects

**Related Issue:** [#77](https://github.com/cexll/myclaude/issues/77)

---

### Q4: Codex permission denied with the new Go version

**Problem:**
After upgrading to the new Go-based Codex implementation, execution fails with permission-denied errors.

**Solution:**
Add the following configuration to `~/.codex/config.toml` (Windows: `c:\user\.codex\config.toml`):
```toml
model = "gpt-5.1-codex-max"
model_reasoning_effort = "high"
model_reasoning_summary = "detailed"
approval_policy = "never"
sandbox_mode = "workspace-write"
disable_response_storage = true
network_access = true
```

**Key settings:**
- `approval_policy = "never"` - Remove approval restrictions
- `sandbox_mode = "workspace-write"` - Allow workspace write access
- `network_access = true` - Enable network access

**Related Issue:** [#31](https://github.com/cexll/myclaude/issues/31)

---

### Q5: How to disable the default bypass/skip-permissions mode

**Background:**
By default, codeagent-wrapper enables bypass mode for both the Codex and Claude backends:
- `CODEX_BYPASS_SANDBOX=true` - Bypasses Codex sandbox restrictions
- `CODEAGENT_SKIP_PERMISSIONS=true` - Skips Claude permission prompts

**To disable (if you need sandbox/permission protection):**
```bash
export CODEX_BYPASS_SANDBOX=false
export CODEAGENT_SKIP_PERMISSIONS=false
```

Or add to your shell profile (`~/.zshrc` or `~/.bashrc`):
```bash
echo 'export CODEX_BYPASS_SANDBOX=false' >> ~/.zshrc
echo 'export CODEAGENT_SKIP_PERMISSIONS=false' >> ~/.zshrc
```

**Note:** Disabling bypass mode will require manual approval for certain operations.
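Both variables are read as boolean environment toggles that default to enabled. A minimal sketch of how such a toggle can be interpreted (the accepted spellings are an assumption, not documented wrapper behavior):

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// envFlag reports whether a toggle such as CODEAGENT_SKIP_PERMISSIONS or
// CODEX_BYPASS_SANDBOX should be treated as enabled, falling back to def
// when the variable is unset or unrecognized.
func envFlag(key string, def bool) bool {
	val, ok := os.LookupEnv(key)
	if !ok {
		return def
	}
	switch strings.TrimSpace(strings.ToLower(val)) {
	case "1", "true", "yes", "on":
		return true
	case "0", "false", "no", "off":
		return false
	default:
		return def
	}
}

func main() {
	fmt.Println("skip permissions:", envFlag("CODEAGENT_SKIP_PERMISSIONS", true))
	fmt.Println("bypass sandbox:  ", envFlag("CODEX_BYPASS_SANDBOX", true))
}
```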
---

**Still having issues?** Visit [GitHub Issues](https://github.com/cexll/myclaude/issues) to search or report new issues.

---

## Documentation
- **[Codeagent-Wrapper Guide](docs/CODEAGENT-WRAPPER.md)** - Multi-backend execution wrapper
- **[Hooks Documentation](docs/HOOKS.md)** - Custom hooks and automation
README_CN.md (153 changed lines)

@@ -2,7 +2,7 @@

[](https://www.gnu.org/licenses/agpl-3.0)
[](https://claude.ai/code)
[](https://github.com/cexll/myclaude)
[](https://github.com/cexll/myclaude)

> AI-powered development automation - multi-backend execution architecture (Codex/Claude/Gemini)

@@ -30,6 +30,41 @@ python3 install.py --install-dir ~/.claude

## Workflow Overview

### 0. OmO Multi-Agent Orchestrator (Recommended for Complex Tasks)

**A multi-agent orchestration system that intelligently routes tasks to specialized agents based on risk signals.**

```bash
/omo "Analyze and fix this authentication bug"
```

**Agent Hierarchy:**

| Agent | Role | Backend | Model |
|-------|------|---------|-------|
| `oracle` | Technical advisor | Claude | claude-opus-4-5 |
| `librarian` | External research | Claude | claude-sonnet-4-5 |
| `explore` | Codebase search | OpenCode | grok-code |
| `develop` | Code implementation | Codex | gpt-5.2 |
| `frontend-ui-ux-engineer` | UI/UX specialist | Gemini | gemini-3-pro |
| `document-writer` | Documentation | Gemini | gemini-3-flash |

**Routing Signals (not a fixed pipeline):**
- Code location unclear → `explore`
- External library/API → `librarian`
- High-risk/multi-file change → `oracle`
- Implementation needed → `develop` / `frontend-ui-ux-engineer`

**Common Recipes:**
- Explain code: `explore`
- Small fix with a known location: `develop` directly
- Bug fix, location unknown: `explore → develop`
- Cross-module refactor: `explore → oracle → develop`
- External API integration: `explore + librarian → oracle → develop`

**Best For:** Complex bug investigation, multi-file refactoring, architecture decisions

---

### 1. Dev Workflow (Recommended)

**The preferred workflow for most development tasks.**

@@ -282,8 +317,10 @@ $Env:PATH = "$HOME\bin;$Env:PATH"
```

```batch
REM cmd.exe - persist for the current user
setx PATH "%USERPROFILE%\bin;%PATH%"
REM cmd.exe - persist for the current user (prefer the PowerShell method above)
REM WARNING: this expands %PATH%, which includes the system PATH, causing duplication
REM Note: use reg add instead of setx to avoid the 1024-character truncation limit
reg add "HKCU\Environment" /v Path /t REG_EXPAND_SZ /d "%USERPROFILE%\bin;%PATH%" /f
```

---

@@ -307,11 +344,14 @@ setx PATH "%USERPROFILE%\bin;%PATH%"

**Codex wrapper not found:**
```bash
# Check PATH
echo $PATH | grep -q "$HOME/.claude/bin" || echo 'export PATH="$HOME/.claude/bin:$PATH"' >> ~/.zshrc
# The installer adds PATH automatically; check whether it is present
if [[ ":$PATH:" != *":$HOME/.claude/bin:"* ]]; then
  echo "PATH not configured. Reinstalling..."
  bash install.sh
fi

# Reinstall
bash install.sh
# Or add manually (idempotent command)
[[ ":$PATH:" != *":$HOME/.claude/bin:"* ]] && echo 'export PATH="$HOME/.claude/bin:$PATH"' >> ~/.zshrc
```

**Permission denied:**
@@ -330,6 +370,105 @@ python3 install.py --module dev --force

---

## FAQ

### Q1: `codeagent-wrapper` reports "Unknown event format"

**Problem:**
Running `codeagent-wrapper` produces errors such as:
```
Unknown event format: {"type":"turn.started"}
Unknown event format: {"type":"assistant", ...}
```

**Solution:**
This is a display issue in the logged event stream and does not affect actual execution. A fix is expected in the next version; you can ignore this log output when troubleshooting other problems.

**Related Issue:** [#96](https://github.com/cexll/myclaude/issues/96)

---

### Q2: Gemini cannot read files ignored by `.gitignore`

**Problem:**
When using `codeagent-wrapper --backend gemini`, files in directories such as `.claude/` that are ignored by `.gitignore` cannot be read.

**Solution:**
- **Option 1:** Stop ignoring `.claude/` in the project's root `.gitignore`
- **Option 2:** Make sure the files that need to be read are not on the `.gitignore` list

**Related Issue:** [#75](https://github.com/cexll/myclaude/issues/75)

---

### Q3: `/dev` command parallel execution is very slow

**Problem:**
Developing a simple feature with the `/dev` command takes too long (over 30 minutes), with no visibility into task status.

**Solution:**
1. **Check logs:** Review `C:\Users\User\AppData\Local\Temp\codeagent-wrapper-*.log` to find bottlenecks
2. **Adjust the backend:**
   - Try faster models such as `gpt-5.1-codex-max`
   - Running under WSL may be significantly faster
3. **Workspace:** Use a standalone repository instead of a monorepo containing multiple sub-projects

**Related Issue:** [#77](https://github.com/cexll/myclaude/issues/77)

---

### Q4: Codex permission denied with the new Go implementation

**Problem:**
After upgrading to the new Go-based Codex implementation, permission-denied errors appear.

**Solution:**
Add the following configuration to `~/.codex/config.toml` (Windows: `c:\user\.codex\config.toml`):
```toml
model = "gpt-5.1-codex-max"
model_reasoning_effort = "high"
model_reasoning_summary = "detailed"
approval_policy = "never"
sandbox_mode = "workspace-write"
disable_response_storage = true
network_access = true
```

**Key settings:**
- `approval_policy = "never"` - Remove approval restrictions
- `sandbox_mode = "workspace-write"` - Allow workspace write access
- `network_access = true` - Enable network access

**Related Issue:** [#31](https://github.com/cexll/myclaude/issues/31)

---

### Q5: Permission denied or sandbox restrictions during execution

**Problem:**
Running codeagent-wrapper hits permission errors or sandbox restrictions.

**Solution:**
Set the following environment variables:
```bash
export CODEX_BYPASS_SANDBOX=true
export CODEAGENT_SKIP_PERMISSIONS=true
```

Or add them to your shell profile (`~/.zshrc` or `~/.bashrc`):
```bash
echo 'export CODEX_BYPASS_SANDBOX=true' >> ~/.zshrc
echo 'export CODEAGENT_SKIP_PERMISSIONS=true' >> ~/.zshrc
```

**Note:** These settings bypass safety restrictions; use them only in trusted environments.

---

**Still have questions?** Visit [GitHub Issues](https://github.com/cexll/myclaude/issues) to search or file a new issue.

---

## License

AGPL-3.0 License - see [LICENSE](LICENSE)
@@ -1,37 +0,0 @@

```json
{
  "name": "bmad-agile-workflow",
  "source": "./",
  "description": "Full BMAD agile workflow with role-based agents (PO, Architect, SM, Dev, QA) and interactive approval gates",
  "version": "1.0.0",
  "author": {
    "name": "Claude Code Dev Workflows",
    "url": "https://github.com/cexll/myclaude"
  },
  "homepage": "https://github.com/cexll/myclaude",
  "repository": "https://github.com/cexll/myclaude",
  "license": "MIT",
  "keywords": [
    "bmad",
    "agile",
    "scrum",
    "product-owner",
    "architect",
    "developer",
    "qa",
    "workflow-orchestration"
  ],
  "category": "workflows",
  "strict": false,
  "commands": [
    "./commands/bmad-pilot.md"
  ],
  "agents": [
    "./agents/bmad-po.md",
    "./agents/bmad-architect.md",
    "./agents/bmad-sm.md",
    "./agents/bmad-dev.md",
    "./agents/bmad-qa.md",
    "./agents/bmad-orchestrator.md",
    "./agents/bmad-review.md"
  ]
}
```
bmad-agile-workflow/.claude-plugin/plugin.json (9 lines, new file)

@@ -0,0 +1,9 @@

```json
{
  "name": "bmad",
  "description": "Full BMAD agile workflow with role-based agents (PO, Architect, SM, Dev, QA) and interactive approval gates",
  "version": "5.6.1",
  "author": {
    "name": "cexll",
    "email": "cexll@cexll.com"
  }
}
```
codeagent-wrapper/.github/workflows/ci.yml (39 lines, new file, vendored)

@@ -0,0 +1,39 @@

```yaml
name: CI

on:
  push:
    branches: [main, master]
  pull_request:

permissions:
  contents: read

jobs:
  test:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        go-version: ["1.21", "1.22"]
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-go@v5
        with:
          go-version: ${{ matrix.go-version }}
          cache: true
      - name: Test
        run: make test
      - name: Build
        run: make build

  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-go@v5
        with:
          go-version: "1.22"
          cache: true
      - name: Lint
        run: make lint
```
codeagent-wrapper/.gitignore (12 changed lines, vendored)

@@ -1,4 +1,7 @@

```
# Build artifacts
bin/
codeagent
codeagent.exe
codeagent-wrapper
codeagent-wrapper.exe
*.test
```

@@ -9,3 +12,12 @@ coverage*.out

```
cover.out
cover_*.out
coverage.html

# Logs
*.log

# Temp files
*.tmp
*.swp
*~
.DS_Store
```
codeagent-wrapper/Makefile (38 lines, new file)

@@ -0,0 +1,38 @@

```makefile
GO ?= go

BINARY ?= codeagent
CMD_PKG := ./cmd/codeagent

TOOLS_BIN := $(CURDIR)/bin
TOOLCHAIN ?= go1.22.0
GOLANGCI_LINT_VERSION := v1.56.2
STATICCHECK_VERSION := v0.4.7

GOLANGCI_LINT := $(TOOLS_BIN)/golangci-lint
STATICCHECK := $(TOOLS_BIN)/staticcheck

.PHONY: build test lint clean install

build:
	$(GO) build -o $(BINARY) $(CMD_PKG)

test:
	$(GO) test ./...

$(GOLANGCI_LINT):
	@mkdir -p $(TOOLS_BIN)
	GOTOOLCHAIN=$(TOOLCHAIN) GOBIN=$(TOOLS_BIN) $(GO) install github.com/golangci/golangci-lint/cmd/golangci-lint@$(GOLANGCI_LINT_VERSION)

$(STATICCHECK):
	@mkdir -p $(TOOLS_BIN)
	GOTOOLCHAIN=$(TOOLCHAIN) GOBIN=$(TOOLS_BIN) $(GO) install honnef.co/go/tools/cmd/staticcheck@$(STATICCHECK_VERSION)

lint: $(GOLANGCI_LINT) $(STATICCHECK)
	GOTOOLCHAIN=$(TOOLCHAIN) $(GOLANGCI_LINT) run ./...
	GOTOOLCHAIN=$(TOOLCHAIN) $(STATICCHECK) ./...

clean:
	@python3 -c 'import glob, os; paths=["codeagent","codeagent.exe","codeagent-wrapper","codeagent-wrapper.exe","coverage.out","cover.out","coverage.html"]; paths += glob.glob("coverage*.out") + glob.glob("cover_*.out") + glob.glob("*.test"); [os.remove(p) for p in paths if os.path.exists(p)]'

install:
	$(GO) install $(CMD_PKG)
```
codeagent-wrapper/README.md (151 lines, new file)

@@ -0,0 +1,151 @@

# codeagent-wrapper

`codeagent-wrapper` is a multi-backend AI code-agent command-line wrapper written in Go: it wraps different AI tool backends (Codex / Claude / Gemini / Opencode) behind a single CLI entry point and provides a consistent experience for arguments, configuration, and session resumption.

Entry point: `cmd/codeagent/main.go` (produced binary name: `codeagent`).

## Features

- Multi-backend support: `codex` / `claude` / `gemini` / `opencode`
- Unified command line: `codeagent [flags] <task>` / `codeagent resume <session_id> <task> [workdir]`
- Automatic stdin: tasks with newlines, special characters, or excessive length are passed via stdin automatically, avoiding shell-quoting hell; `-` can also be used explicitly
- Config merging: supports a config file plus `CODEAGENT_*` environment variables (viper)
- Agent presets: reads backend/model/prompt presets from `~/.codeagent/models.json`
- Parallel execution: `--parallel` reads a multi-task configuration from stdin and runs tasks concurrently following their dependency topology
- Log cleanup: `codeagent cleanup` removes old logs (logs are written to the system temp directory)

## Installation

Requires Go 1.21+.

From the repository root:

```bash
go install ./cmd/codeagent
```

Verify after installation:

```bash
codeagent version
```

## Usage Examples

Simplest usage (default backend: `codex`):

```bash
codeagent "Analyze the entry logic of internal/app/cli.go and suggest improvements"
```

Specify a backend:

```bash
codeagent --backend claude "Explain the parallel config format in internal/executor/parallel_config.go"
```

Specify a working directory (second positional argument):

```bash
codeagent "Search the current repo for potential data races" .
```

Read the task explicitly from stdin (using `-`):

```bash
cat task.txt | codeagent -
```

Resume a session:

```bash
codeagent resume <session_id> "Continue the previous task"
```

Parallel mode (reads the task configuration from stdin; positional arguments are not allowed):

```bash
codeagent --parallel <<'EOF'
---TASK---
id: t1
workdir: .
backend: codex
---CONTENT---
List the main modules of this project and their responsibilities.
---TASK---
id: t2
dependencies: t1
backend: claude
---CONTENT---
Based on the conclusions of t1, identify refactoring risks and recommendations.
EOF
```
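Each `---TASK---` block carries `key: value` metadata (`id`, `workdir`, `backend`, `dependencies`, `session_id`) followed by the task text after `---CONTENT---`. A simplified sketch of parsing that format (illustrative only, not the wrapper's actual parser):

```go
package main

import (
	"fmt"
	"strings"
)

// TaskSpec mirrors the fields used in a ---TASK--- metadata block.
type TaskSpec struct {
	ID           string
	WorkDir      string
	Backend      string
	Dependencies []string
	Content      string
}

// parseTasks splits the stdin payload into task blocks and their metadata.
func parseTasks(input string) ([]TaskSpec, error) {
	var tasks []TaskSpec
	for _, block := range strings.Split(input, "---TASK---") {
		block = strings.TrimSpace(block)
		if block == "" {
			continue
		}
		parts := strings.SplitN(block, "---CONTENT---", 2)
		if len(parts) != 2 {
			return nil, fmt.Errorf("task block missing ---CONTENT--- separator")
		}
		task := TaskSpec{WorkDir: "."}
		for _, line := range strings.Split(parts[0], "\n") {
			kv := strings.SplitN(strings.TrimSpace(line), ":", 2)
			if len(kv) != 2 {
				continue
			}
			key, value := strings.TrimSpace(kv[0]), strings.TrimSpace(kv[1])
			switch key {
			case "id":
				task.ID = value
			case "workdir":
				task.WorkDir = value
			case "backend":
				task.Backend = value
			case "dependencies":
				for _, dep := range strings.Split(value, ",") {
					if dep = strings.TrimSpace(dep); dep != "" {
						task.Dependencies = append(task.Dependencies, dep)
					}
				}
			}
		}
		task.Content = strings.TrimSpace(parts[1])
		tasks = append(tasks, task)
	}
	return tasks, nil
}

func main() {
	tasks, err := parseTasks("---TASK---\nid: t1\n---CONTENT---\nList modules.")
	if err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", tasks)
}
```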
## Configuration

### Config file

Default lookup path (when `--config` is empty):

- `$HOME/.codeagent/config.(yaml|yml|json|toml|...)`

Example (YAML):

```yaml
backend: codex
model: gpt-4.1
skip-permissions: false
```

You can also point to a file explicitly with `--config /path/to/config.yaml`.

### Environment variables (`CODEAGENT_*`)

Read through viper, with `-` automatically mapped to `_` (see the sketch after this list). Common entries:

- `CODEAGENT_BACKEND` (`codex|claude|gemini|opencode`)
- `CODEAGENT_MODEL`
- `CODEAGENT_AGENT`
- `CODEAGENT_PROMPT_FILE`
- `CODEAGENT_REASONING_EFFORT`
- `CODEAGENT_SKIP_PERMISSIONS`
- `CODEAGENT_FULL_OUTPUT` (legacy output in parallel mode)
- `CODEAGENT_MAX_PARALLEL_WORKERS` (0 means unlimited, capped at 100)
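A minimal sketch of merging the config file with `CODEAGENT_*` environment variables via viper, matching the mapping described above (the exact wiring is an assumption, not the wrapper's code):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"

	"github.com/spf13/viper"
)

func loadConfig() (*viper.Viper, error) {
	v := viper.New()
	v.SetDefault("backend", "codex")

	// Map CODEAGENT_BACKEND -> backend, CODEAGENT_SKIP_PERMISSIONS -> skip-permissions, etc.
	v.SetEnvPrefix("CODEAGENT")
	v.SetEnvKeyReplacer(strings.NewReplacer("-", "_"))
	v.AutomaticEnv()

	// Look for ~/.codeagent/config.{yaml,yml,json,toml,...}
	home, err := os.UserHomeDir()
	if err != nil {
		return nil, err
	}
	v.SetConfigName("config")
	v.AddConfigPath(filepath.Join(home, ".codeagent"))
	if err := v.ReadInConfig(); err != nil {
		// A missing config file is fine; defaults and env vars still apply.
		if _, ok := err.(viper.ConfigFileNotFoundError); !ok {
			return nil, err
		}
	}
	return v, nil
}

func main() {
	v, err := loadConfig()
	if err != nil {
		panic(err)
	}
	fmt.Println("backend:", v.GetString("backend"))
	fmt.Println("skip-permissions:", v.GetBool("skip-permissions"))
}
```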
### Agent presets (`~/.codeagent/models.json`)

You can define agent → backend/model/prompt mappings in `~/.codeagent/models.json` and select one with `--agent <name>`:

```json
{
  "default_backend": "opencode",
  "default_model": "opencode/grok-code",
  "agents": {
    "develop": {
      "backend": "codex",
      "model": "gpt-4.1",
      "prompt_file": "~/.codeagent/prompts/develop.md",
      "description": "Code development"
    }
  }
}
```
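The preset file maps an agent name to a backend, model, and prompt file. A small sketch of loading it (the struct shape is inferred from the JSON above; error handling is illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"
)

// AgentPreset mirrors one entry under "agents" in ~/.codeagent/models.json.
type AgentPreset struct {
	Backend     string `json:"backend"`
	Model       string `json:"model"`
	PromptFile  string `json:"prompt_file"`
	Description string `json:"description"`
}

// ModelsConfig mirrors the top-level structure of models.json.
type ModelsConfig struct {
	DefaultBackend string                 `json:"default_backend"`
	DefaultModel   string                 `json:"default_model"`
	Agents         map[string]AgentPreset `json:"agents"`
}

func loadModels() (*ModelsConfig, error) {
	home, err := os.UserHomeDir()
	if err != nil {
		return nil, err
	}
	data, err := os.ReadFile(filepath.Join(home, ".codeagent", "models.json"))
	if err != nil {
		return nil, err
	}
	var cfg ModelsConfig
	if err := json.Unmarshal(data, &cfg); err != nil {
		return nil, err
	}
	return &cfg, nil
}

func main() {
	cfg, err := loadModels()
	if err != nil {
		fmt.Println("no presets:", err)
		return
	}
	if preset, ok := cfg.Agents["develop"]; ok {
		fmt.Printf("--agent develop -> backend=%s model=%s\n", preset.Backend, preset.Model)
	}
}
```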
## Supported backends

The project itself ships no model capabilities; it relies on the corresponding CLIs being installed locally and discoverable on `PATH` (a sketch of one backend invocation follows the list):

- `codex`: runs `codex e ...` (adds `--dangerously-bypass-approvals-and-sandbox` by default; set `CODEX_BYPASS_SANDBOX=false` to turn this off)
- `claude`: runs `claude -p ... --output-format stream-json` (skips permission prompts by default; set `CODEAGENT_SKIP_PERMISSIONS=false` to re-enable them)
- `gemini`: runs `gemini ... -o stream-json` (can load environment variables from `~/.gemini/.env`)
- `opencode`: runs `opencode run --format json`
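Each backend is an external CLI resolved from `PATH`; the wrapper only assembles arguments and the working directory. A simplified sketch for the `claude` case (flags taken from the list above; the structure is illustrative rather than the wrapper's exact code):

```go
package main

import (
	"fmt"
	"os"
	"os/exec"
)

// buildClaudeArgs assembles the invocation described above:
// claude -p ... --output-format stream-json, optionally skipping permission prompts.
func buildClaudeArgs(task string, skipPermissions bool) []string {
	args := []string{"-p"}
	if skipPermissions {
		args = append(args, "--dangerously-skip-permissions")
	}
	return append(args, "--output-format", "stream-json", task)
}

func main() {
	cmd := exec.Command("claude", buildClaudeArgs("explain this repo", true)...)
	cmd.Dir = "." // working directory, as passed via the second positional argument
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	if err := cmd.Run(); err != nil {
		fmt.Fprintln(os.Stderr, "claude backend failed:", err)
	}
}
```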
## Development

```bash
make build
make test
make lint
make clean
```
@@ -1,135 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
// Backend defines the contract for invoking different AI CLI backends.
|
||||
// Each backend is responsible for supplying the executable command and
|
||||
// building the argument list based on the wrapper config.
|
||||
type Backend interface {
|
||||
Name() string
|
||||
BuildArgs(cfg *Config, targetArg string) []string
|
||||
Command() string
|
||||
}
|
||||
|
||||
type CodexBackend struct{}
|
||||
|
||||
func (CodexBackend) Name() string { return "codex" }
|
||||
func (CodexBackend) Command() string {
|
||||
return "codex"
|
||||
}
|
||||
func (CodexBackend) BuildArgs(cfg *Config, targetArg string) []string {
|
||||
return buildCodexArgs(cfg, targetArg)
|
||||
}
|
||||
|
||||
type ClaudeBackend struct{}
|
||||
|
||||
func (ClaudeBackend) Name() string { return "claude" }
|
||||
func (ClaudeBackend) Command() string {
|
||||
return "claude"
|
||||
}
|
||||
func (ClaudeBackend) BuildArgs(cfg *Config, targetArg string) []string {
|
||||
return buildClaudeArgs(cfg, targetArg)
|
||||
}
|
||||
|
||||
const maxClaudeSettingsBytes = 1 << 20 // 1MB
|
||||
|
||||
// loadMinimalEnvSettings 从 ~/.claude/settings.json 只提取 env 配置。
|
||||
// 只接受字符串类型的值;文件缺失/解析失败/超限都返回空。
|
||||
func loadMinimalEnvSettings() map[string]string {
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil || home == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
settingPath := filepath.Join(home, ".claude", "settings.json")
|
||||
info, err := os.Stat(settingPath)
|
||||
if err != nil || info.Size() > maxClaudeSettingsBytes {
|
||||
return nil
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(settingPath)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
var cfg struct {
|
||||
Env map[string]any `json:"env"`
|
||||
}
|
||||
if err := json.Unmarshal(data, &cfg); err != nil {
|
||||
return nil
|
||||
}
|
||||
if len(cfg.Env) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
env := make(map[string]string, len(cfg.Env))
|
||||
for k, v := range cfg.Env {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
env[k] = s
|
||||
}
|
||||
if len(env) == 0 {
|
||||
return nil
|
||||
}
|
||||
return env
|
||||
}
|
||||
|
||||
func buildClaudeArgs(cfg *Config, targetArg string) []string {
|
||||
if cfg == nil {
|
||||
return nil
|
||||
}
|
||||
args := []string{"-p"}
|
||||
if cfg.SkipPermissions {
|
||||
args = append(args, "--dangerously-skip-permissions")
|
||||
}
|
||||
|
||||
// Prevent infinite recursion: disable all setting sources (user, project, local)
|
||||
// This ensures a clean execution environment without CLAUDE.md or skills that would trigger codeagent
|
||||
args = append(args, "--setting-sources", "")
|
||||
|
||||
if cfg.Mode == "resume" {
|
||||
if cfg.SessionID != "" {
|
||||
// Claude CLI uses -r <session_id> for resume.
|
||||
args = append(args, "-r", cfg.SessionID)
|
||||
}
|
||||
}
|
||||
// Note: claude CLI doesn't support -C flag; workdir set via cmd.Dir
|
||||
|
||||
args = append(args, "--output-format", "stream-json", "--verbose", targetArg)
|
||||
|
||||
return args
|
||||
}
|
||||
|
||||
type GeminiBackend struct{}
|
||||
|
||||
func (GeminiBackend) Name() string { return "gemini" }
|
||||
func (GeminiBackend) Command() string {
|
||||
return "gemini"
|
||||
}
|
||||
func (GeminiBackend) BuildArgs(cfg *Config, targetArg string) []string {
|
||||
return buildGeminiArgs(cfg, targetArg)
|
||||
}
|
||||
|
||||
func buildGeminiArgs(cfg *Config, targetArg string) []string {
|
||||
if cfg == nil {
|
||||
return nil
|
||||
}
|
||||
args := []string{"-o", "stream-json", "-y"}
|
||||
|
||||
if cfg.Mode == "resume" {
|
||||
if cfg.SessionID != "" {
|
||||
args = append(args, "-r", cfg.SessionID)
|
||||
}
|
||||
}
|
||||
// Note: gemini CLI doesn't support -C flag; workdir set via cmd.Dir
|
||||
|
||||
args = append(args, "-p", targetArg)
|
||||
|
||||
return args
|
||||
}
|
||||
@@ -1,39 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
// BenchmarkLoggerWrite 测试日志写入性能
|
||||
func BenchmarkLoggerWrite(b *testing.B) {
|
||||
logger, err := NewLogger()
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
defer logger.Close()
|
||||
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
logger.Info("benchmark log message")
|
||||
}
|
||||
b.StopTimer()
|
||||
logger.Flush()
|
||||
}
|
||||
|
||||
// BenchmarkLoggerConcurrentWrite 测试并发日志写入性能
|
||||
func BenchmarkLoggerConcurrentWrite(b *testing.B) {
|
||||
logger, err := NewLogger()
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
defer logger.Close()
|
||||
|
||||
b.ResetTimer()
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
logger.Info("concurrent benchmark log message")
|
||||
}
|
||||
})
|
||||
b.StopTimer()
|
||||
logger.Flush()
|
||||
}
|
||||
codeagent-wrapper/cmd/codeagent/main.go (7 lines, new file)

@@ -0,0 +1,7 @@

```go
package main

import app "codeagent-wrapper/internal/app"

func main() {
	app.Run()
}
```
@@ -1,287 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Config holds CLI configuration
|
||||
type Config struct {
|
||||
Mode string // "new" or "resume"
|
||||
Task string
|
||||
SessionID string
|
||||
WorkDir string
|
||||
ExplicitStdin bool
|
||||
Timeout int
|
||||
Backend string
|
||||
SkipPermissions bool
|
||||
MaxParallelWorkers int
|
||||
}
|
||||
|
||||
// ParallelConfig defines the JSON schema for parallel execution
|
||||
type ParallelConfig struct {
|
||||
Tasks []TaskSpec `json:"tasks"`
|
||||
GlobalBackend string `json:"backend,omitempty"`
|
||||
}
|
||||
|
||||
// TaskSpec describes an individual task entry in the parallel config
|
||||
type TaskSpec struct {
|
||||
ID string `json:"id"`
|
||||
Task string `json:"task"`
|
||||
WorkDir string `json:"workdir,omitempty"`
|
||||
Dependencies []string `json:"dependencies,omitempty"`
|
||||
SessionID string `json:"session_id,omitempty"`
|
||||
Backend string `json:"backend,omitempty"`
|
||||
Mode string `json:"-"`
|
||||
UseStdin bool `json:"-"`
|
||||
Context context.Context `json:"-"`
|
||||
}
|
||||
|
||||
// TaskResult captures the execution outcome of a task
|
||||
type TaskResult struct {
|
||||
TaskID string `json:"task_id"`
|
||||
ExitCode int `json:"exit_code"`
|
||||
Message string `json:"message"`
|
||||
SessionID string `json:"session_id"`
|
||||
Error string `json:"error"`
|
||||
LogPath string `json:"log_path"`
|
||||
// Structured report fields
|
||||
Coverage string `json:"coverage,omitempty"` // extracted coverage percentage (e.g., "92%")
|
||||
CoverageNum float64 `json:"coverage_num,omitempty"` // numeric coverage for comparison
|
||||
CoverageTarget float64 `json:"coverage_target,omitempty"` // target coverage (default 90)
|
||||
FilesChanged []string `json:"files_changed,omitempty"` // list of changed files
|
||||
KeyOutput string `json:"key_output,omitempty"` // brief summary of what was done
|
||||
TestsPassed int `json:"tests_passed,omitempty"` // number of tests passed
|
||||
TestsFailed int `json:"tests_failed,omitempty"` // number of tests failed
|
||||
sharedLog bool
|
||||
}
|
||||
|
||||
var backendRegistry = map[string]Backend{
|
||||
"codex": CodexBackend{},
|
||||
"claude": ClaudeBackend{},
|
||||
"gemini": GeminiBackend{},
|
||||
}
|
||||
|
||||
func selectBackend(name string) (Backend, error) {
|
||||
key := strings.ToLower(strings.TrimSpace(name))
|
||||
if key == "" {
|
||||
key = defaultBackendName
|
||||
}
|
||||
if backend, ok := backendRegistry[key]; ok {
|
||||
return backend, nil
|
||||
}
|
||||
return nil, fmt.Errorf("unsupported backend %q", name)
|
||||
}
|
||||
|
||||
func envFlagEnabled(key string) bool {
|
||||
val, ok := os.LookupEnv(key)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
val = strings.TrimSpace(strings.ToLower(val))
|
||||
switch val {
|
||||
case "", "0", "false", "no", "off":
|
||||
return false
|
||||
default:
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
func parseBoolFlag(val string, defaultValue bool) bool {
|
||||
val = strings.TrimSpace(strings.ToLower(val))
|
||||
switch val {
|
||||
case "1", "true", "yes", "on":
|
||||
return true
|
||||
case "0", "false", "no", "off":
|
||||
return false
|
||||
default:
|
||||
return defaultValue
|
||||
}
|
||||
}
|
||||
|
||||
func parseParallelConfig(data []byte) (*ParallelConfig, error) {
|
||||
trimmed := bytes.TrimSpace(data)
|
||||
if len(trimmed) == 0 {
|
||||
return nil, fmt.Errorf("parallel config is empty")
|
||||
}
|
||||
|
||||
tasks := strings.Split(string(trimmed), "---TASK---")
|
||||
var cfg ParallelConfig
|
||||
seen := make(map[string]struct{})
|
||||
|
||||
taskIndex := 0
|
||||
for _, taskBlock := range tasks {
|
||||
taskBlock = strings.TrimSpace(taskBlock)
|
||||
if taskBlock == "" {
|
||||
continue
|
||||
}
|
||||
taskIndex++
|
||||
|
||||
parts := strings.SplitN(taskBlock, "---CONTENT---", 2)
|
||||
if len(parts) != 2 {
|
||||
return nil, fmt.Errorf("task block #%d missing ---CONTENT--- separator", taskIndex)
|
||||
}
|
||||
|
||||
meta := strings.TrimSpace(parts[0])
|
||||
content := strings.TrimSpace(parts[1])
|
||||
|
||||
task := TaskSpec{WorkDir: defaultWorkdir}
|
||||
for _, line := range strings.Split(meta, "\n") {
|
||||
line = strings.TrimSpace(line)
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
kv := strings.SplitN(line, ":", 2)
|
||||
if len(kv) != 2 {
|
||||
continue
|
||||
}
|
||||
key := strings.TrimSpace(kv[0])
|
||||
value := strings.TrimSpace(kv[1])
|
||||
|
||||
switch key {
|
||||
case "id":
|
||||
task.ID = value
|
||||
case "workdir":
|
||||
task.WorkDir = value
|
||||
case "session_id":
|
||||
task.SessionID = value
|
||||
task.Mode = "resume"
|
||||
case "backend":
|
||||
task.Backend = value
|
||||
case "dependencies":
|
||||
for _, dep := range strings.Split(value, ",") {
|
||||
dep = strings.TrimSpace(dep)
|
||||
if dep != "" {
|
||||
task.Dependencies = append(task.Dependencies, dep)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if task.Mode == "" {
|
||||
task.Mode = "new"
|
||||
}
|
||||
|
||||
if task.ID == "" {
|
||||
return nil, fmt.Errorf("task block #%d missing id field", taskIndex)
|
||||
}
|
||||
if content == "" {
|
||||
return nil, fmt.Errorf("task block #%d (%q) missing content", taskIndex, task.ID)
|
||||
}
|
||||
if task.Mode == "resume" && strings.TrimSpace(task.SessionID) == "" {
|
||||
return nil, fmt.Errorf("task block #%d (%q) has empty session_id", taskIndex, task.ID)
|
||||
}
|
||||
if _, exists := seen[task.ID]; exists {
|
||||
return nil, fmt.Errorf("task block #%d has duplicate id: %s", taskIndex, task.ID)
|
||||
}
|
||||
|
||||
task.Task = content
|
||||
cfg.Tasks = append(cfg.Tasks, task)
|
||||
seen[task.ID] = struct{}{}
|
||||
}
|
||||
|
||||
if len(cfg.Tasks) == 0 {
|
||||
return nil, fmt.Errorf("no tasks found")
|
||||
}
|
||||
|
||||
return &cfg, nil
|
||||
}
|
||||
|
||||
func parseArgs() (*Config, error) {
|
||||
args := os.Args[1:]
|
||||
if len(args) == 0 {
|
||||
return nil, fmt.Errorf("task required")
|
||||
}
|
||||
|
||||
backendName := defaultBackendName
|
||||
skipPermissions := envFlagEnabled("CODEAGENT_SKIP_PERMISSIONS")
|
||||
filtered := make([]string, 0, len(args))
|
||||
for i := 0; i < len(args); i++ {
|
||||
arg := args[i]
|
||||
switch {
|
||||
case arg == "--backend":
|
||||
if i+1 >= len(args) {
|
||||
return nil, fmt.Errorf("--backend flag requires a value")
|
||||
}
|
||||
backendName = args[i+1]
|
||||
i++
|
||||
continue
|
||||
case strings.HasPrefix(arg, "--backend="):
|
||||
value := strings.TrimPrefix(arg, "--backend=")
|
||||
if value == "" {
|
||||
return nil, fmt.Errorf("--backend flag requires a value")
|
||||
}
|
||||
backendName = value
|
||||
continue
|
||||
case arg == "--skip-permissions", arg == "--dangerously-skip-permissions":
|
||||
skipPermissions = true
|
||||
continue
|
||||
case strings.HasPrefix(arg, "--skip-permissions="):
|
||||
skipPermissions = parseBoolFlag(strings.TrimPrefix(arg, "--skip-permissions="), skipPermissions)
|
||||
continue
|
||||
case strings.HasPrefix(arg, "--dangerously-skip-permissions="):
|
||||
skipPermissions = parseBoolFlag(strings.TrimPrefix(arg, "--dangerously-skip-permissions="), skipPermissions)
|
||||
continue
|
||||
}
|
||||
filtered = append(filtered, arg)
|
||||
}
|
||||
|
||||
if len(filtered) == 0 {
|
||||
return nil, fmt.Errorf("task required")
|
||||
}
|
||||
args = filtered
|
||||
|
||||
cfg := &Config{WorkDir: defaultWorkdir, Backend: backendName, SkipPermissions: skipPermissions}
|
||||
cfg.MaxParallelWorkers = resolveMaxParallelWorkers()
|
||||
|
||||
if args[0] == "resume" {
|
||||
if len(args) < 3 {
|
||||
return nil, fmt.Errorf("resume mode requires: resume <session_id> <task>")
|
||||
}
|
||||
cfg.Mode = "resume"
|
||||
cfg.SessionID = strings.TrimSpace(args[1])
|
||||
if cfg.SessionID == "" {
|
||||
return nil, fmt.Errorf("resume mode requires non-empty session_id")
|
||||
}
|
||||
cfg.Task = args[2]
|
||||
cfg.ExplicitStdin = (args[2] == "-")
|
||||
if len(args) > 3 {
|
||||
cfg.WorkDir = args[3]
|
||||
}
|
||||
} else {
|
||||
cfg.Mode = "new"
|
||||
cfg.Task = args[0]
|
||||
cfg.ExplicitStdin = (args[0] == "-")
|
||||
if len(args) > 1 {
|
||||
cfg.WorkDir = args[1]
|
||||
}
|
||||
}
|
||||
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
const maxParallelWorkersLimit = 100
|
||||
|
||||
func resolveMaxParallelWorkers() int {
|
||||
raw := strings.TrimSpace(os.Getenv("CODEAGENT_MAX_PARALLEL_WORKERS"))
|
||||
if raw == "" {
|
||||
return 0
|
||||
}
|
||||
|
||||
value, err := strconv.Atoi(raw)
|
||||
if err != nil || value < 0 {
|
||||
logWarn(fmt.Sprintf("Invalid CODEAGENT_MAX_PARALLEL_WORKERS=%q, falling back to unlimited", raw))
|
||||
return 0
|
||||
}
|
||||
|
||||
if value > maxParallelWorkersLimit {
|
||||
logWarn(fmt.Sprintf("CODEAGENT_MAX_PARALLEL_WORKERS=%d exceeds limit, capping at %d", value, maxParallelWorkersLimit))
|
||||
return maxParallelWorkersLimit
|
||||
}
|
||||
|
||||
return value
|
||||
}
|
||||
@@ -1,3 +1,43 @@
|
||||
module codeagent-wrapper
|
||||
|
||||
go 1.21
|
||||
|
||||
require (
|
||||
github.com/goccy/go-json v0.10.5
|
||||
github.com/rs/zerolog v1.34.0
|
||||
github.com/shirou/gopsutil/v3 v3.24.5
|
||||
github.com/spf13/cobra v1.8.1
|
||||
github.com/spf13/pflag v1.0.5
|
||||
github.com/spf13/viper v1.19.0
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/fsnotify/fsnotify v1.7.0 // indirect
|
||||
github.com/go-ole/go-ole v1.2.6 // indirect
|
||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
|
||||
github.com/magiconair/properties v1.8.7 // indirect
|
||||
github.com/mattn/go-colorable v0.1.13 // indirect
|
||||
github.com/mattn/go-isatty v0.0.19 // indirect
|
||||
github.com/mitchellh/mapstructure v1.5.0 // indirect
|
||||
github.com/pelletier/go-toml/v2 v2.2.2 // indirect
|
||||
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
|
||||
github.com/sagikazarmark/locafero v0.4.0 // indirect
|
||||
github.com/sagikazarmark/slog-shim v0.1.0 // indirect
|
||||
github.com/shoenig/go-m1cpu v0.1.6 // indirect
|
||||
github.com/sourcegraph/conc v0.3.0 // indirect
|
||||
github.com/spf13/afero v1.11.0 // indirect
|
||||
github.com/spf13/cast v1.6.0 // indirect
|
||||
github.com/subosito/gotenv v1.6.0 // indirect
|
||||
github.com/tklauser/go-sysconf v0.3.12 // indirect
|
||||
github.com/tklauser/numcpus v0.6.1 // indirect
|
||||
github.com/yusufpapurcu/wmi v1.2.4 // indirect
|
||||
go.uber.org/atomic v1.9.0 // indirect
|
||||
go.uber.org/multierr v1.9.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect
|
||||
golang.org/x/sys v0.20.0 // indirect
|
||||
golang.org/x/text v0.14.0 // indirect
|
||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
|
||||
117
codeagent-wrapper/go.sum
Normal file
117
codeagent-wrapper/go.sum
Normal file
@@ -0,0 +1,117 @@
|
||||
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
|
||||
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
|
||||
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
|
||||
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
|
||||
github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
|
||||
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
|
||||
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
|
||||
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
|
||||
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
github.com/pelletier/go-toml/v2 v2.2.2/go.mod h1:1t835xjRzz80PqgE6HHgN2JOsmgYu/h4qDAS4n929Rs=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw=
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY=
github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6keLGt6kNQ=
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/shirou/gopsutil/v3 v3.24.5 h1:i0t8kL+kQTvpAYToeuiVk3TgDeKOFioZO3Ztz/iZ9pI=
github.com/shirou/gopsutil/v3 v3.24.5/go.mod h1:bsoOS1aStSs9ErQ1WWfxllSeS1K5D+U30r2NfcubMVk=
github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM=
github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ=
github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU=
github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0=
github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM=
github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI=
github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU=
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
go.uber.org/atomic v1.9.0 h1:ECmE8Bn/WFTYwEW/bpKD3M8VtR/zQVbavAoalC1PYyE=
go.uber.org/atomic v1.9.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g=
golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
codeagent-wrapper/internal/app/agent_validation_test.go (new file, +150)
@@ -0,0 +1,150 @@
package wrapper

import (
	"context"
	"os"
	"path/filepath"
	"testing"
	"time"

	config "codeagent-wrapper/internal/config"
	executor "codeagent-wrapper/internal/executor"
)

func TestValidateAgentName(t *testing.T) {
	tests := []struct {
		name    string
		input   string
		wantErr bool
	}{
		{name: "simple", input: "develop", wantErr: false},
		{name: "upper", input: "ABC", wantErr: false},
		{name: "digits", input: "a1", wantErr: false},
		{name: "dash underscore", input: "a-b_c", wantErr: false},
		{name: "empty", input: "", wantErr: true},
		{name: "space", input: "a b", wantErr: true},
		{name: "slash", input: "a/b", wantErr: true},
		{name: "dotdot", input: "../evil", wantErr: true},
		{name: "unicode", input: "中文", wantErr: true},
		{name: "symbol", input: "a$b", wantErr: true},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			err := config.ValidateAgentName(tt.input)
			if (err != nil) != tt.wantErr {
				t.Fatalf("validateAgentName(%q) err=%v, wantErr=%v", tt.input, err, tt.wantErr)
			}
		})
	}
}

func TestParseArgs_InvalidAgentNameRejected(t *testing.T) {
	defer resetTestHooks()
	os.Args = []string{"codeagent-wrapper", "--agent", "../evil", "task"}
	if _, err := parseArgs(); err == nil {
		t.Fatalf("expected parseArgs to reject invalid agent name")
	}
}

func TestParseParallelConfig_InvalidAgentNameRejected(t *testing.T) {
	input := `---TASK---
id: task-1
agent: ../evil
---CONTENT---
do something`
	if _, err := parseParallelConfig([]byte(input)); err == nil {
		t.Fatalf("expected parseParallelConfig to reject invalid agent name")
	}
}

func TestParseParallelConfig_ResolvesAgentPromptFile(t *testing.T) {
	home := t.TempDir()
	t.Setenv("HOME", home)
	t.Setenv("USERPROFILE", home)
	t.Cleanup(config.ResetModelsConfigCacheForTest)
	config.ResetModelsConfigCacheForTest()

	configDir := filepath.Join(home, ".codeagent")
	if err := os.MkdirAll(configDir, 0o755); err != nil {
		t.Fatalf("MkdirAll: %v", err)
	}
	if err := os.WriteFile(filepath.Join(configDir, "models.json"), []byte(`{
"default_backend": "codex",
"default_model": "gpt-test",
"agents": {
"custom-agent": {
"backend": "codex",
"model": "gpt-test",
"prompt_file": "~/.claude/prompt.md"
}
}
}`), 0o644); err != nil {
		t.Fatalf("WriteFile: %v", err)
	}

	input := `---TASK---
id: task-1
agent: custom-agent
---CONTENT---
do something`
	cfg, err := parseParallelConfig([]byte(input))
	if err != nil {
		t.Fatalf("parseParallelConfig() unexpected error: %v", err)
	}
	if len(cfg.Tasks) != 1 {
		t.Fatalf("expected 1 task, got %d", len(cfg.Tasks))
	}
	if got := cfg.Tasks[0].PromptFile; got != "~/.claude/prompt.md" {
		t.Fatalf("PromptFile = %q, want %q", got, "~/.claude/prompt.md")
	}
}

func TestDefaultRunCodexTaskFn_AppliesAgentPromptFile(t *testing.T) {
	defer resetTestHooks()

	home := t.TempDir()
	t.Setenv("HOME", home)
	t.Setenv("USERPROFILE", home)

	claudeDir := filepath.Join(home, ".claude")
	if err := os.MkdirAll(claudeDir, 0o755); err != nil {
		t.Fatalf("MkdirAll: %v", err)
	}
	if err := os.WriteFile(filepath.Join(claudeDir, "prompt.md"), []byte("P\n"), 0o644); err != nil {
		t.Fatalf("WriteFile: %v", err)
	}

	fake := newFakeCmd(fakeCmdConfig{
		StdoutPlan: []fakeStdoutEvent{
			{Data: `{"type":"item.completed","item":{"type":"agent_message","text":"ok"}}` + "\n"},
		},
		WaitDelay: 2 * time.Millisecond,
	})

	_ = executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner { return fake })
	_ = executor.SetSelectBackendFn(func(name string) (Backend, error) {
		return testBackend{
			name:    name,
			command: "fake-cmd",
			argsFn: func(cfg *Config, targetArg string) []string {
				return []string{targetArg}
			},
		}, nil
	})

	res := defaultRunCodexTaskFn(TaskSpec{
		ID:         "t",
		Task:       "do",
		Backend:    "codex",
		PromptFile: "~/.claude/prompt.md",
	}, 5)
	if res.ExitCode != 0 {
		t.Fatalf("unexpected result: %+v", res)
	}

	want := "<agent-prompt>\nP\n</agent-prompt>\n\ndo"
	if got := fake.StdinContents(); got != want {
		t.Fatalf("stdin mismatch:\n got=%q\nwant=%q", got, want)
	}
}
codeagent-wrapper/internal/app/app.go (new file, +278)
@@ -0,0 +1,278 @@
|
||||
package wrapper
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
|
||||
version = "6.0.0-alpha1"
|
||||
defaultWorkdir = "."
|
||||
defaultTimeout = 7200 // seconds (2 hours)
|
||||
defaultCoverageTarget = 90.0
|
||||
codexLogLineLimit = 1000
|
||||
stdinSpecialChars = "\n\\\"'`$"
|
||||
stderrCaptureLimit = 4 * 1024
|
||||
defaultBackendName = "codex"
|
||||
defaultCodexCommand = "codex"
|
||||
|
||||
// stdout close reasons
|
||||
stdoutCloseReasonWait = "wait-done"
|
||||
stdoutCloseReasonDrain = "drain-timeout"
|
||||
stdoutCloseReasonCtx = "context-cancel"
|
||||
stdoutDrainTimeout = 100 * time.Millisecond
|
||||
)
|
||||
|
||||
// Test hooks for dependency injection
|
||||
var (
|
||||
stdinReader io.Reader = os.Stdin
|
||||
isTerminalFn = defaultIsTerminal
|
||||
codexCommand = defaultCodexCommand
|
||||
cleanupHook func()
|
||||
startupCleanupAsync = true
|
||||
|
||||
buildCodexArgsFn = buildCodexArgs
|
||||
selectBackendFn = selectBackend
|
||||
cleanupLogsFn = cleanupOldLogs
|
||||
defaultBuildArgsFn = buildCodexArgs
|
||||
runTaskFn = runCodexTask
|
||||
exitFn = os.Exit
|
||||
)
|
||||
|
||||
func runStartupCleanup() {
|
||||
if cleanupLogsFn == nil {
|
||||
return
|
||||
}
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
logWarn(fmt.Sprintf("cleanupOldLogs panic: %v", r))
|
||||
}
|
||||
}()
|
||||
if _, err := cleanupLogsFn(); err != nil {
|
||||
logWarn(fmt.Sprintf("cleanupOldLogs error: %v", err))
|
||||
}
|
||||
}
|
||||
|
||||
func scheduleStartupCleanup() {
|
||||
if !startupCleanupAsync {
|
||||
runStartupCleanup()
|
||||
return
|
||||
}
|
||||
if cleanupLogsFn == nil {
|
||||
return
|
||||
}
|
||||
fn := cleanupLogsFn
|
||||
go func() {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
logWarn(fmt.Sprintf("cleanupOldLogs panic: %v", r))
|
||||
}
|
||||
}()
|
||||
if _, err := fn(); err != nil {
|
||||
logWarn(fmt.Sprintf("cleanupOldLogs error: %v", err))
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
func runCleanupMode() int {
|
||||
if cleanupLogsFn == nil {
|
||||
fmt.Fprintln(os.Stderr, "Cleanup failed: log cleanup function not configured")
|
||||
return 1
|
||||
}
|
||||
|
||||
stats, err := cleanupLogsFn()
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cleanup failed: %v\n", err)
|
||||
return 1
|
||||
}
|
||||
|
||||
fmt.Println("Cleanup completed")
|
||||
fmt.Printf("Files scanned: %d\n", stats.Scanned)
|
||||
fmt.Printf("Files deleted: %d\n", stats.Deleted)
|
||||
if len(stats.DeletedFiles) > 0 {
|
||||
for _, f := range stats.DeletedFiles {
|
||||
fmt.Printf(" - %s\n", f)
|
||||
}
|
||||
}
|
||||
fmt.Printf("Files kept: %d\n", stats.Kept)
|
||||
if len(stats.KeptFiles) > 0 {
|
||||
for _, f := range stats.KeptFiles {
|
||||
fmt.Printf(" - %s\n", f)
|
||||
}
|
||||
}
|
||||
if stats.Errors > 0 {
|
||||
fmt.Printf("Deletion errors: %d\n", stats.Errors)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func readAgentPromptFile(path string, allowOutsideClaudeDir bool) (string, error) {
|
||||
raw := strings.TrimSpace(path)
|
||||
if raw == "" {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
expanded := raw
|
||||
if raw == "~" || strings.HasPrefix(raw, "~/") || strings.HasPrefix(raw, "~\\") {
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if raw == "~" {
|
||||
expanded = home
|
||||
} else {
|
||||
expanded = home + raw[1:]
|
||||
}
|
||||
}
|
||||
|
||||
absPath, err := filepath.Abs(expanded)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
absPath = filepath.Clean(absPath)
|
||||
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
if !allowOutsideClaudeDir {
|
||||
return "", err
|
||||
}
|
||||
logWarn(fmt.Sprintf("Failed to resolve home directory for prompt file validation: %v; proceeding without restriction", err))
|
||||
} else {
|
||||
allowedDirs := []string{
|
||||
filepath.Clean(filepath.Join(home, ".claude")),
|
||||
filepath.Clean(filepath.Join(home, ".codeagent", "agents")),
|
||||
}
|
||||
for i := range allowedDirs {
|
||||
allowedAbs, err := filepath.Abs(allowedDirs[i])
|
||||
if err == nil {
|
||||
allowedDirs[i] = filepath.Clean(allowedAbs)
|
||||
}
|
||||
}
|
||||
|
||||
isWithinDir := func(path, dir string) bool {
|
||||
rel, err := filepath.Rel(dir, path)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
rel = filepath.Clean(rel)
|
||||
if rel == "." {
|
||||
return true
|
||||
}
|
||||
if rel == ".." {
|
||||
return false
|
||||
}
|
||||
prefix := ".." + string(os.PathSeparator)
|
||||
return !strings.HasPrefix(rel, prefix)
|
||||
}
|
||||
|
||||
if !allowOutsideClaudeDir {
|
||||
withinAllowed := false
|
||||
for _, dir := range allowedDirs {
|
||||
if isWithinDir(absPath, dir) {
|
||||
withinAllowed = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !withinAllowed {
|
||||
logWarn(fmt.Sprintf("Refusing to read prompt file outside allowed dirs (%s): %s", strings.Join(allowedDirs, ", "), absPath))
|
||||
return "", fmt.Errorf("prompt file must be under ~/.claude or ~/.codeagent/agents")
|
||||
}
|
||||
|
||||
resolvedPath, errPath := filepath.EvalSymlinks(absPath)
|
||||
if errPath == nil {
|
||||
resolvedPath = filepath.Clean(resolvedPath)
|
||||
resolvedAllowed := make([]string, 0, len(allowedDirs))
|
||||
for _, dir := range allowedDirs {
|
||||
resolvedBase, errBase := filepath.EvalSymlinks(dir)
|
||||
if errBase != nil {
|
||||
continue
|
||||
}
|
||||
resolvedAllowed = append(resolvedAllowed, filepath.Clean(resolvedBase))
|
||||
}
|
||||
if len(resolvedAllowed) > 0 {
|
||||
withinResolved := false
|
||||
for _, dir := range resolvedAllowed {
|
||||
if isWithinDir(resolvedPath, dir) {
|
||||
withinResolved = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !withinResolved {
|
||||
logWarn(fmt.Sprintf("Refusing to read prompt file outside allowed dirs (%s) (resolved): %s", strings.Join(resolvedAllowed, ", "), resolvedPath))
|
||||
return "", fmt.Errorf("prompt file must be under ~/.claude or ~/.codeagent/agents")
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
withinAllowed := false
|
||||
for _, dir := range allowedDirs {
|
||||
if isWithinDir(absPath, dir) {
|
||||
withinAllowed = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !withinAllowed {
|
||||
logWarn(fmt.Sprintf("Reading prompt file outside allowed dirs (%s): %s", strings.Join(allowedDirs, ", "), absPath))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(absPath)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return strings.TrimRight(string(data), "\r\n"), nil
|
||||
}
|
||||
|
||||
func wrapTaskWithAgentPrompt(prompt string, task string) string {
|
||||
return "<agent-prompt>\n" + prompt + "\n</agent-prompt>\n\n" + task
|
||||
}
|
||||
|
||||
func runCleanupHook() {
|
||||
if logger := activeLogger(); logger != nil {
|
||||
logger.Flush()
|
||||
}
|
||||
if cleanupHook != nil {
|
||||
cleanupHook()
|
||||
}
|
||||
}
|
||||
|
||||
func printHelp() {
|
||||
name := currentWrapperName()
|
||||
help := fmt.Sprintf(`%[1]s - Go wrapper for AI CLI backends
|
||||
|
||||
Usage:
|
||||
%[1]s "task" [workdir]
|
||||
%[1]s --backend claude "task" [workdir]
|
||||
%[1]s --prompt-file /path/to/prompt.md "task" [workdir]
|
||||
%[1]s - [workdir] Read task from stdin
|
||||
%[1]s resume <session_id> "task" [workdir]
|
||||
%[1]s resume <session_id> - [workdir]
|
||||
%[1]s --parallel Run tasks in parallel (config from stdin)
|
||||
%[1]s --parallel --full-output Run tasks in parallel with full output (legacy)
|
||||
%[1]s --version
|
||||
%[1]s --help
|
||||
|
||||
Parallel mode examples:
|
||||
%[1]s --parallel < tasks.txt
|
||||
echo '...' | %[1]s --parallel
|
||||
%[1]s --parallel --full-output < tasks.txt
|
||||
%[1]s --parallel <<'EOF'
|
||||
|
||||
Environment Variables:
|
||||
CODEX_TIMEOUT Timeout in milliseconds (default: 7200000)
|
||||
CODEAGENT_ASCII_MODE Use ASCII symbols instead of Unicode (PASS/WARN/FAIL)
|
||||
|
||||
Exit Codes:
|
||||
0 Success
|
||||
1 General error (missing args, no output)
|
||||
124 Timeout
|
||||
127 backend command not found
|
||||
130 Interrupted (Ctrl+C)
|
||||
* Passthrough from backend process`, name)
|
||||
fmt.Println(help)
|
||||
}
|
||||
codeagent-wrapper/internal/app/backend.go (new file, +9)
@@ -0,0 +1,9 @@
package wrapper

import backend "codeagent-wrapper/internal/backend"

type Backend = backend.Backend
type CodexBackend = backend.CodexBackend
type ClaudeBackend = backend.ClaudeBackend
type GeminiBackend = backend.GeminiBackend
type OpencodeBackend = backend.OpencodeBackend
codeagent-wrapper/internal/app/backend_init.go (new file, +7)
@@ -0,0 +1,7 @@
package wrapper

import backend "codeagent-wrapper/internal/backend"

func init() {
	backend.SetLogFuncs(logWarn, logError)
}
codeagent-wrapper/internal/app/backend_registry.go (new file, +5)
@@ -0,0 +1,5 @@
package wrapper

import backend "codeagent-wrapper/internal/backend"

func selectBackend(name string) (Backend, error) { return backend.Select(name) }
codeagent-wrapper/internal/app/bench_test.go (new file, +103)
@@ -0,0 +1,103 @@
package wrapper

import (
	"bytes"
	"os"
	"testing"

	config "codeagent-wrapper/internal/config"
)

var (
	benchCmdSink      any
	benchConfigSink   *Config
	benchMessageSink  string
	benchThreadIDSink string
)

// BenchmarkStartup_NewRootCommand measures CLI startup overhead (command+flags construction).
func BenchmarkStartup_NewRootCommand(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		benchCmdSink = newRootCommand()
	}
}

// BenchmarkConfigParse_ParseArgs measures config parsing from argv/env (steady-state).
func BenchmarkConfigParse_ParseArgs(b *testing.B) {
	home := b.TempDir()
	b.Setenv("HOME", home)
	b.Setenv("USERPROFILE", home)

	config.ResetModelsConfigCacheForTest()
	b.Cleanup(config.ResetModelsConfigCacheForTest)

	origArgs := os.Args
	os.Args = []string{"codeagent-wrapper", "--agent", "develop", "task"}
	b.Cleanup(func() { os.Args = origArgs })

	if _, err := parseArgs(); err != nil {
		b.Fatalf("warmup parseArgs() error: %v", err)
	}

	b.ReportAllocs()
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		cfg, err := parseArgs()
		if err != nil {
			b.Fatalf("parseArgs() error: %v", err)
		}
		benchConfigSink = cfg
	}
}

// BenchmarkJSONParse_ParseJSONStreamInternal measures line-delimited JSON stream parsing.
func BenchmarkJSONParse_ParseJSONStreamInternal(b *testing.B) {
	stream := []byte(
		`{"type":"thread.started","thread_id":"t"}` + "\n" +
			`{"type":"item.completed","item":{"type":"agent_message","text":"hello"}}` + "\n" +
			`{"type":"thread.completed","thread_id":"t"}` + "\n",
	)
	b.SetBytes(int64(len(stream)))

	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		message, threadID := parseJSONStreamInternal(bytes.NewReader(stream), nil, nil, nil, nil)
		benchMessageSink = message
		benchThreadIDSink = threadID
	}
}

// BenchmarkLoggerWrite measures log write performance.
func BenchmarkLoggerWrite(b *testing.B) {
	logger, err := NewLogger()
	if err != nil {
		b.Fatal(err)
	}
	defer logger.Close()

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		logger.Info("benchmark log message")
	}
	b.StopTimer()
	logger.Flush()
}

// BenchmarkLoggerConcurrentWrite measures concurrent log write performance.
func BenchmarkLoggerConcurrentWrite(b *testing.B) {
	logger, err := NewLogger()
	if err != nil {
		b.Fatal(err)
	}
	defer logger.Close()

	b.ResetTimer()
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			logger.Info("concurrent benchmark log message")
		}
	})
	b.StopTimer()
	logger.Flush()
}
codeagent-wrapper/internal/app/cli.go (new file, +663)
@@ -0,0 +1,663 @@
|
||||
package wrapper
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"reflect"
|
||||
"strings"
|
||||
|
||||
config "codeagent-wrapper/internal/config"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/pflag"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
type exitError struct {
|
||||
code int
|
||||
}
|
||||
|
||||
func (e exitError) Error() string {
|
||||
return fmt.Sprintf("exit %d", e.code)
|
||||
}
|
||||
|
||||
type cliOptions struct {
|
||||
Backend string
|
||||
Model string
|
||||
ReasoningEffort string
|
||||
Agent string
|
||||
PromptFile string
|
||||
SkipPermissions bool
|
||||
|
||||
Parallel bool
|
||||
FullOutput bool
|
||||
|
||||
Cleanup bool
|
||||
Version bool
|
||||
ConfigFile string
|
||||
}
|
||||
|
||||
func Main() {
|
||||
Run()
|
||||
}
|
||||
|
||||
// Run is the program entrypoint for cmd/codeagent/main.go.
|
||||
func Run() {
|
||||
exitFn(run())
|
||||
}
|
||||
|
||||
func run() int {
|
||||
cmd := newRootCommand()
|
||||
cmd.SetArgs(os.Args[1:])
|
||||
if err := cmd.Execute(); err != nil {
|
||||
var ee exitError
|
||||
if errors.As(err, &ee) {
|
||||
return ee.code
|
||||
}
|
||||
return 1
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func newRootCommand() *cobra.Command {
|
||||
name := currentWrapperName()
|
||||
opts := &cliOptions{}
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: fmt.Sprintf("%s [flags] <task>|resume <session_id> <task> [workdir]", name),
|
||||
Short: "Go wrapper for AI CLI backends",
|
||||
SilenceErrors: true,
|
||||
SilenceUsage: true,
|
||||
Args: cobra.ArbitraryArgs,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
if opts.Version {
|
||||
fmt.Printf("%s version %s\n", name, version)
|
||||
return nil
|
||||
}
|
||||
if opts.Cleanup {
|
||||
code := runCleanupMode()
|
||||
if code == 0 {
|
||||
return nil
|
||||
}
|
||||
return exitError{code: code}
|
||||
}
|
||||
|
||||
exitCode := runWithLoggerAndCleanup(func() int {
|
||||
v, err := config.NewViper(opts.ConfigFile)
|
||||
if err != nil {
|
||||
logError(err.Error())
|
||||
return 1
|
||||
}
|
||||
|
||||
if opts.Parallel {
|
||||
return runParallelMode(cmd, args, opts, v, name)
|
||||
}
|
||||
|
||||
logInfo("Script started")
|
||||
|
||||
cfg, err := buildSingleConfig(cmd, args, os.Args[1:], opts, v)
|
||||
if err != nil {
|
||||
logError(err.Error())
|
||||
return 1
|
||||
}
|
||||
logInfo(fmt.Sprintf("Parsed args: mode=%s, task_len=%d, backend=%s", cfg.Mode, len(cfg.Task), cfg.Backend))
|
||||
return runSingleMode(cfg, name)
|
||||
})
|
||||
|
||||
if exitCode == 0 {
|
||||
return nil
|
||||
}
|
||||
return exitError{code: exitCode}
|
||||
},
|
||||
}
|
||||
cmd.CompletionOptions.DisableDefaultCmd = true
|
||||
|
||||
addRootFlags(cmd.Flags(), opts)
|
||||
cmd.AddCommand(newVersionCommand(name), newCleanupCommand())
|
||||
|
||||
return cmd
|
||||
}
|
||||
|
||||
func addRootFlags(fs *pflag.FlagSet, opts *cliOptions) {
|
||||
fs.StringVar(&opts.ConfigFile, "config", "", "Config file path (default: $HOME/.codeagent/config.*)")
|
||||
fs.BoolVarP(&opts.Version, "version", "v", false, "Print version and exit")
|
||||
fs.BoolVar(&opts.Cleanup, "cleanup", false, "Clean up old logs and exit")
|
||||
|
||||
fs.BoolVar(&opts.Parallel, "parallel", false, "Run tasks in parallel (config from stdin)")
|
||||
fs.BoolVar(&opts.FullOutput, "full-output", false, "Parallel mode: include full task output (legacy)")
|
||||
|
||||
fs.StringVar(&opts.Backend, "backend", defaultBackendName, "Backend to use (codex, claude, gemini, opencode)")
|
||||
fs.StringVar(&opts.Model, "model", "", "Model override")
|
||||
fs.StringVar(&opts.ReasoningEffort, "reasoning-effort", "", "Reasoning effort (backend-specific)")
|
||||
fs.StringVar(&opts.Agent, "agent", "", "Agent preset name (from ~/.codeagent/models.json)")
|
||||
fs.StringVar(&opts.PromptFile, "prompt-file", "", "Prompt file path")
|
||||
|
||||
fs.BoolVar(&opts.SkipPermissions, "skip-permissions", false, "Skip permissions prompts (also via CODEAGENT_SKIP_PERMISSIONS)")
|
||||
fs.BoolVar(&opts.SkipPermissions, "dangerously-skip-permissions", false, "Alias for --skip-permissions")
|
||||
}
|
||||
|
||||
func newVersionCommand(name string) *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "version",
|
||||
Short: "Print version and exit",
|
||||
SilenceErrors: true,
|
||||
SilenceUsage: true,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
fmt.Printf("%s version %s\n", name, version)
|
||||
return nil
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func newCleanupCommand() *cobra.Command {
|
||||
return &cobra.Command{
|
||||
Use: "cleanup",
|
||||
Short: "Clean up old logs and exit",
|
||||
SilenceErrors: true,
|
||||
SilenceUsage: true,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
code := runCleanupMode()
|
||||
if code == 0 {
|
||||
return nil
|
||||
}
|
||||
return exitError{code: code}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func runWithLoggerAndCleanup(fn func() int) (exitCode int) {
|
||||
logger, err := NewLogger()
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ERROR: failed to initialize logger: %v\n", err)
|
||||
return 1
|
||||
}
|
||||
setLogger(logger)
|
||||
|
||||
defer func() {
|
||||
logger := activeLogger()
|
||||
if logger != nil {
|
||||
logger.Flush()
|
||||
}
|
||||
if err := closeLogger(); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ERROR: failed to close logger: %v\n", err)
|
||||
}
|
||||
if logger == nil {
|
||||
return
|
||||
}
|
||||
|
||||
if exitCode != 0 {
|
||||
if entries := logger.ExtractRecentErrors(10); len(entries) > 0 {
|
||||
fmt.Fprintln(os.Stderr, "\n=== Recent Errors ===")
|
||||
for _, entry := range entries {
|
||||
fmt.Fprintln(os.Stderr, entry)
|
||||
}
|
||||
fmt.Fprintf(os.Stderr, "Log file: %s (deleted)\n", logger.Path())
|
||||
}
|
||||
}
|
||||
_ = logger.RemoveLogFile()
|
||||
}()
|
||||
defer runCleanupHook()
|
||||
|
||||
// Clean up stale logs from previous runs.
|
||||
scheduleStartupCleanup()
|
||||
|
||||
return fn()
|
||||
}
|
||||
|
||||
func parseArgs() (*Config, error) {
|
||||
opts := &cliOptions{}
|
||||
cmd := &cobra.Command{SilenceErrors: true, SilenceUsage: true, Args: cobra.ArbitraryArgs}
|
||||
addRootFlags(cmd.Flags(), opts)
|
||||
|
||||
rawArgv := os.Args[1:]
|
||||
if err := cmd.ParseFlags(rawArgv); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
args := cmd.Flags().Args()
|
||||
|
||||
v, err := config.NewViper(opts.ConfigFile)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return buildSingleConfig(cmd, args, rawArgv, opts, v)
|
||||
}
|
||||
|
||||
func buildSingleConfig(cmd *cobra.Command, args []string, rawArgv []string, opts *cliOptions, v *viper.Viper) (*Config, error) {
|
||||
backendName := defaultBackendName
|
||||
model := ""
|
||||
reasoningEffort := ""
|
||||
agentName := ""
|
||||
promptFile := ""
|
||||
promptFileExplicit := false
|
||||
yolo := false
|
||||
|
||||
if cmd.Flags().Changed("agent") {
|
||||
agentName = strings.TrimSpace(opts.Agent)
|
||||
if agentName == "" {
|
||||
return nil, fmt.Errorf("--agent flag requires a value")
|
||||
}
|
||||
if err := config.ValidateAgentName(agentName); err != nil {
|
||||
return nil, fmt.Errorf("--agent flag invalid value: %w", err)
|
||||
}
|
||||
} else {
|
||||
agentName = strings.TrimSpace(v.GetString("agent"))
|
||||
if agentName != "" {
|
||||
if err := config.ValidateAgentName(agentName); err != nil {
|
||||
return nil, fmt.Errorf("--agent flag invalid value: %w", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var resolvedBackend, resolvedModel, resolvedPromptFile, resolvedReasoning string
|
||||
if agentName != "" {
|
||||
var resolvedYolo bool
|
||||
resolvedBackend, resolvedModel, resolvedPromptFile, resolvedReasoning, _, _, resolvedYolo = config.ResolveAgentConfig(agentName)
|
||||
yolo = resolvedYolo
|
||||
}
|
||||
|
||||
if cmd.Flags().Changed("prompt-file") {
|
||||
promptFile = strings.TrimSpace(opts.PromptFile)
|
||||
if promptFile == "" {
|
||||
return nil, fmt.Errorf("--prompt-file flag requires a value")
|
||||
}
|
||||
promptFileExplicit = true
|
||||
} else if val := strings.TrimSpace(v.GetString("prompt-file")); val != "" {
|
||||
promptFile = val
|
||||
promptFileExplicit = true
|
||||
} else {
|
||||
promptFile = resolvedPromptFile
|
||||
}
|
||||
|
||||
agentFlagChanged := cmd.Flags().Changed("agent")
|
||||
backendFlagChanged := cmd.Flags().Changed("backend")
|
||||
if backendFlagChanged {
|
||||
backendName = strings.TrimSpace(opts.Backend)
|
||||
if backendName == "" {
|
||||
return nil, fmt.Errorf("--backend flag requires a value")
|
||||
}
|
||||
}
|
||||
|
||||
switch {
|
||||
case agentFlagChanged && backendFlagChanged && lastFlagIndex(rawArgv, "agent") > lastFlagIndex(rawArgv, "backend"):
|
||||
backendName = resolvedBackend
|
||||
case !backendFlagChanged && agentName != "":
|
||||
backendName = resolvedBackend
|
||||
case !backendFlagChanged:
|
||||
if val := strings.TrimSpace(v.GetString("backend")); val != "" {
|
||||
backendName = val
|
||||
}
|
||||
}
|
||||
|
||||
modelFlagChanged := cmd.Flags().Changed("model")
|
||||
if modelFlagChanged {
|
||||
model = strings.TrimSpace(opts.Model)
|
||||
if model == "" {
|
||||
return nil, fmt.Errorf("--model flag requires a value")
|
||||
}
|
||||
}
|
||||
|
||||
switch {
|
||||
case agentFlagChanged && modelFlagChanged && lastFlagIndex(rawArgv, "agent") > lastFlagIndex(rawArgv, "model"):
|
||||
model = strings.TrimSpace(resolvedModel)
|
||||
case !modelFlagChanged && agentName != "":
|
||||
model = strings.TrimSpace(resolvedModel)
|
||||
case !modelFlagChanged:
|
||||
model = strings.TrimSpace(v.GetString("model"))
|
||||
}
|
||||
|
||||
if cmd.Flags().Changed("reasoning-effort") {
|
||||
reasoningEffort = strings.TrimSpace(opts.ReasoningEffort)
|
||||
if reasoningEffort == "" {
|
||||
return nil, fmt.Errorf("--reasoning-effort flag requires a value")
|
||||
}
|
||||
} else if val := strings.TrimSpace(v.GetString("reasoning-effort")); val != "" {
|
||||
reasoningEffort = val
|
||||
} else if agentName != "" {
|
||||
reasoningEffort = strings.TrimSpace(resolvedReasoning)
|
||||
}
|
||||
|
||||
skipChanged := cmd.Flags().Changed("skip-permissions") || cmd.Flags().Changed("dangerously-skip-permissions")
|
||||
skipPermissions := false
|
||||
if skipChanged {
|
||||
skipPermissions = opts.SkipPermissions
|
||||
} else {
|
||||
skipPermissions = v.GetBool("skip-permissions")
|
||||
}
|
||||
|
||||
if len(args) == 0 {
|
||||
return nil, fmt.Errorf("task required")
|
||||
}
|
||||
|
||||
cfg := &Config{
|
||||
WorkDir: defaultWorkdir,
|
||||
Backend: backendName,
|
||||
Agent: agentName,
|
||||
PromptFile: promptFile,
|
||||
PromptFileExplicit: promptFileExplicit,
|
||||
SkipPermissions: skipPermissions,
|
||||
Yolo: yolo,
|
||||
Model: model,
|
||||
ReasoningEffort: reasoningEffort,
|
||||
MaxParallelWorkers: config.ResolveMaxParallelWorkers(),
|
||||
}
|
||||
|
||||
if args[0] == "resume" {
|
||||
if len(args) < 3 {
|
||||
return nil, fmt.Errorf("resume mode requires: resume <session_id> <task>")
|
||||
}
|
||||
cfg.Mode = "resume"
|
||||
cfg.SessionID = strings.TrimSpace(args[1])
|
||||
if cfg.SessionID == "" {
|
||||
return nil, fmt.Errorf("resume mode requires non-empty session_id")
|
||||
}
|
||||
cfg.Task = args[2]
|
||||
cfg.ExplicitStdin = (args[2] == "-")
|
||||
if len(args) > 3 {
|
||||
if args[3] == "-" {
|
||||
return nil, fmt.Errorf("invalid workdir: '-' is not a valid directory path")
|
||||
}
|
||||
cfg.WorkDir = args[3]
|
||||
}
|
||||
} else {
|
||||
cfg.Mode = "new"
|
||||
cfg.Task = args[0]
|
||||
cfg.ExplicitStdin = (args[0] == "-")
|
||||
if len(args) > 1 {
|
||||
if args[1] == "-" {
|
||||
return nil, fmt.Errorf("invalid workdir: '-' is not a valid directory path")
|
||||
}
|
||||
cfg.WorkDir = args[1]
|
||||
}
|
||||
}
|
||||
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
func lastFlagIndex(argv []string, name string) int {
|
||||
if len(argv) == 0 {
|
||||
return -1
|
||||
}
|
||||
name = strings.TrimSpace(name)
|
||||
if name == "" {
|
||||
return -1
|
||||
}
|
||||
|
||||
needle := "--" + name
|
||||
prefix := needle + "="
|
||||
last := -1
|
||||
for i, arg := range argv {
|
||||
if arg == needle || strings.HasPrefix(arg, prefix) {
|
||||
last = i
|
||||
}
|
||||
}
|
||||
return last
|
||||
}
|
||||
|
||||
func runParallelMode(cmd *cobra.Command, args []string, opts *cliOptions, v *viper.Viper, name string) int {
|
||||
if len(args) > 0 {
|
||||
fmt.Fprintln(os.Stderr, "ERROR: --parallel reads its task configuration from stdin; no positional arguments are allowed.")
|
||||
fmt.Fprintln(os.Stderr, "Usage examples:")
|
||||
fmt.Fprintf(os.Stderr, " %s --parallel < tasks.txt\n", name)
|
||||
fmt.Fprintf(os.Stderr, " echo '...' | %s --parallel\n", name)
|
||||
fmt.Fprintf(os.Stderr, " %s --parallel <<'EOF'\n", name)
|
||||
fmt.Fprintf(os.Stderr, " %s --parallel --full-output <<'EOF' # include full task output\n", name)
|
||||
return 1
|
||||
}
|
||||
|
||||
if cmd.Flags().Changed("agent") || cmd.Flags().Changed("prompt-file") || cmd.Flags().Changed("reasoning-effort") {
|
||||
fmt.Fprintln(os.Stderr, "ERROR: --parallel reads its task configuration from stdin; only --backend, --model, --full-output and --skip-permissions are allowed.")
|
||||
return 1
|
||||
}
|
||||
|
||||
backendName := defaultBackendName
|
||||
if cmd.Flags().Changed("backend") {
|
||||
backendName = strings.TrimSpace(opts.Backend)
|
||||
if backendName == "" {
|
||||
fmt.Fprintln(os.Stderr, "ERROR: --backend flag requires a value")
|
||||
return 1
|
||||
}
|
||||
} else if val := strings.TrimSpace(v.GetString("backend")); val != "" {
|
||||
backendName = val
|
||||
}
|
||||
|
||||
model := ""
|
||||
if cmd.Flags().Changed("model") {
|
||||
model = strings.TrimSpace(opts.Model)
|
||||
if model == "" {
|
||||
fmt.Fprintln(os.Stderr, "ERROR: --model flag requires a value")
|
||||
return 1
|
||||
}
|
||||
} else {
|
||||
model = strings.TrimSpace(v.GetString("model"))
|
||||
}
|
||||
|
||||
fullOutput := opts.FullOutput
|
||||
if !cmd.Flags().Changed("full-output") && v.IsSet("full-output") {
|
||||
fullOutput = v.GetBool("full-output")
|
||||
}
|
||||
|
||||
skipChanged := cmd.Flags().Changed("skip-permissions") || cmd.Flags().Changed("dangerously-skip-permissions")
|
||||
skipPermissions := false
|
||||
if skipChanged {
|
||||
skipPermissions = opts.SkipPermissions
|
||||
} else {
|
||||
skipPermissions = v.GetBool("skip-permissions")
|
||||
}
|
||||
|
||||
backend, err := selectBackendFn(backendName)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ERROR: %v\n", err)
|
||||
return 1
|
||||
}
|
||||
backendName = backend.Name()
|
||||
|
||||
data, err := io.ReadAll(stdinReader)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ERROR: failed to read stdin: %v\n", err)
|
||||
return 1
|
||||
}
|
||||
|
||||
cfg, err := parseParallelConfig(data)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ERROR: %v\n", err)
|
||||
return 1
|
||||
}
|
||||
|
||||
cfg.GlobalBackend = backendName
|
||||
model = strings.TrimSpace(model)
|
||||
for i := range cfg.Tasks {
|
||||
if strings.TrimSpace(cfg.Tasks[i].Backend) == "" {
|
||||
cfg.Tasks[i].Backend = backendName
|
||||
}
|
||||
if strings.TrimSpace(cfg.Tasks[i].Model) == "" && model != "" {
|
||||
cfg.Tasks[i].Model = model
|
||||
}
|
||||
cfg.Tasks[i].SkipPermissions = cfg.Tasks[i].SkipPermissions || skipPermissions
|
||||
}
|
||||
|
||||
timeoutSec := resolveTimeout()
|
||||
layers, err := topologicalSort(cfg.Tasks)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ERROR: %v\n", err)
|
||||
return 1
|
||||
}
|
||||
|
||||
results := executeConcurrent(layers, timeoutSec)
|
||||
|
||||
for i := range results {
|
||||
results[i].CoverageTarget = defaultCoverageTarget
|
||||
if results[i].Message == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
lines := strings.Split(results[i].Message, "\n")
|
||||
results[i].Coverage = extractCoverageFromLines(lines)
|
||||
results[i].CoverageNum = extractCoverageNum(results[i].Coverage)
|
||||
results[i].FilesChanged = extractFilesChangedFromLines(lines)
|
||||
results[i].TestsPassed, results[i].TestsFailed = extractTestResultsFromLines(lines)
|
||||
results[i].KeyOutput = extractKeyOutputFromLines(lines, 150)
|
||||
}
|
||||
|
||||
fmt.Println(generateFinalOutputWithMode(results, !fullOutput))
|
||||
|
||||
exitCode := 0
|
||||
for _, res := range results {
|
||||
if res.ExitCode != 0 {
|
||||
exitCode = res.ExitCode
|
||||
}
|
||||
}
|
||||
return exitCode
|
||||
}
|
||||
|
||||
func runSingleMode(cfg *Config, name string) int {
|
||||
backend, err := selectBackendFn(cfg.Backend)
|
||||
if err != nil {
|
||||
logError(err.Error())
|
||||
return 1
|
||||
}
|
||||
cfg.Backend = backend.Name()
|
||||
|
||||
cmdInjected := codexCommand != defaultCodexCommand
|
||||
argsInjected := buildCodexArgsFn != nil && reflect.ValueOf(buildCodexArgsFn).Pointer() != reflect.ValueOf(defaultBuildArgsFn).Pointer()
|
||||
|
||||
if backend.Name() != defaultBackendName || !cmdInjected {
|
||||
codexCommand = backend.Command()
|
||||
}
|
||||
if backend.Name() != defaultBackendName || !argsInjected {
|
||||
buildCodexArgsFn = backend.BuildArgs
|
||||
}
|
||||
logInfo(fmt.Sprintf("Selected backend: %s", backend.Name()))
|
||||
|
||||
timeoutSec := resolveTimeout()
|
||||
logInfo(fmt.Sprintf("Timeout: %ds", timeoutSec))
|
||||
cfg.Timeout = timeoutSec
|
||||
|
||||
var taskText string
|
||||
var piped bool
|
||||
|
||||
if cfg.ExplicitStdin {
|
||||
logInfo("Explicit stdin mode: reading task from stdin")
|
||||
data, err := io.ReadAll(stdinReader)
|
||||
if err != nil {
|
||||
logError("Failed to read stdin: " + err.Error())
|
||||
return 1
|
||||
}
|
||||
taskText = string(data)
|
||||
if taskText == "" {
|
||||
logError("Explicit stdin mode requires task input from stdin")
|
||||
return 1
|
||||
}
|
||||
piped = !isTerminal()
|
||||
} else {
|
||||
pipedTask, err := readPipedTask()
|
||||
if err != nil {
|
||||
logError("Failed to read piped stdin: " + err.Error())
|
||||
return 1
|
||||
}
|
||||
piped = pipedTask != ""
|
||||
if piped {
|
||||
taskText = pipedTask
|
||||
} else {
|
||||
taskText = cfg.Task
|
||||
}
|
||||
}
|
||||
|
||||
if strings.TrimSpace(cfg.PromptFile) != "" {
|
||||
prompt, err := readAgentPromptFile(cfg.PromptFile, cfg.PromptFileExplicit)
|
||||
if err != nil {
|
||||
logError("Failed to read prompt file: " + err.Error())
|
||||
return 1
|
||||
}
|
||||
taskText = wrapTaskWithAgentPrompt(prompt, taskText)
|
||||
}
|
||||
|
||||
useStdin := cfg.ExplicitStdin || shouldUseStdin(taskText, piped)
|
||||
|
||||
targetArg := taskText
|
||||
if useStdin {
|
||||
targetArg = "-"
|
||||
}
|
||||
codexArgs := buildCodexArgsFn(cfg, targetArg)
|
||||
|
||||
logger := activeLogger()
|
||||
if logger == nil {
|
||||
fmt.Fprintln(os.Stderr, "ERROR: logger is not initialized")
|
||||
return 1
|
||||
}
|
||||
|
||||
fmt.Fprintf(os.Stderr, "[%s]\n", name)
|
||||
fmt.Fprintf(os.Stderr, " Backend: %s\n", cfg.Backend)
|
||||
fmt.Fprintf(os.Stderr, " Command: %s %s\n", codexCommand, strings.Join(codexArgs, " "))
|
||||
fmt.Fprintf(os.Stderr, " PID: %d\n", os.Getpid())
|
||||
fmt.Fprintf(os.Stderr, " Log: %s\n", logger.Path())
|
||||
|
||||
if useStdin {
|
||||
var reasons []string
|
||||
if piped {
|
||||
reasons = append(reasons, "piped input")
|
||||
}
|
||||
if cfg.ExplicitStdin {
|
||||
reasons = append(reasons, "explicit \"-\"")
|
||||
}
|
||||
if strings.Contains(taskText, "\n") {
|
||||
reasons = append(reasons, "newline")
|
||||
}
|
||||
if strings.Contains(taskText, "\\") {
|
||||
reasons = append(reasons, "backslash")
|
||||
}
|
||||
if strings.Contains(taskText, "\"") {
|
||||
reasons = append(reasons, "double-quote")
|
||||
}
|
||||
if strings.Contains(taskText, "'") {
|
||||
reasons = append(reasons, "single-quote")
|
||||
}
|
||||
if strings.Contains(taskText, "`") {
|
||||
reasons = append(reasons, "backtick")
|
||||
}
|
||||
if strings.Contains(taskText, "$") {
|
||||
reasons = append(reasons, "dollar")
|
||||
}
|
||||
if len(taskText) > 800 {
|
||||
reasons = append(reasons, "length>800")
|
||||
}
|
||||
if len(reasons) > 0 {
|
||||
logWarn(fmt.Sprintf("Using stdin mode for task due to: %s", strings.Join(reasons, ", ")))
|
||||
}
|
||||
}
|
||||
|
||||
logInfo(fmt.Sprintf("%s running...", cfg.Backend))
|
||||
|
||||
taskSpec := TaskSpec{
|
||||
Task: taskText,
|
||||
WorkDir: cfg.WorkDir,
|
||||
Mode: cfg.Mode,
|
||||
SessionID: cfg.SessionID,
|
||||
Model: cfg.Model,
|
||||
ReasoningEffort: cfg.ReasoningEffort,
|
||||
Agent: cfg.Agent,
|
||||
SkipPermissions: cfg.SkipPermissions,
|
||||
UseStdin: useStdin,
|
||||
}
|
||||
|
||||
result := runTaskFn(taskSpec, false, cfg.Timeout)
|
||||
|
||||
if result.ExitCode != 0 {
|
||||
return result.ExitCode
|
||||
}
|
||||
|
||||
// Validate that we got a meaningful output message
|
||||
if strings.TrimSpace(result.Message) == "" {
|
||||
logError(fmt.Sprintf("no output message: backend=%s returned empty result.Message with exit_code=0", cfg.Backend))
|
||||
return 1
|
||||
}
|
||||
|
||||
fmt.Println(result.Message)
|
||||
if result.SessionID != "" {
|
||||
fmt.Printf("\n---\nSESSION_ID: %s\n", result.SessionID)
|
||||
}
|
||||
|
||||
return 0
|
||||
}
|
||||
@@ -1,4 +1,4 @@
package main
package wrapper

import (
	"bufio"
@@ -11,9 +11,20 @@ import (
	"sync/atomic"
	"testing"
	"time"

	"github.com/goccy/go-json"
)

func stripTimestampPrefix(line string) string {
	line = strings.TrimSpace(line)
	if strings.HasPrefix(line, "{") {
		var evt struct {
			Message string `json:"message"`
		}
		if err := json.Unmarshal([]byte(line), &evt); err == nil && evt.Message != "" {
			return evt.Message
		}
	}
	if !strings.HasPrefix(line, "[") {
		return line
	}
codeagent-wrapper/internal/app/config_alias.go (new file, +7)
@@ -0,0 +1,7 @@
package wrapper

import config "codeagent-wrapper/internal/config"

// Keep the existing Config name throughout the codebase, but source the
// implementation from internal/config.
type Config = config.Config
codeagent-wrapper/internal/app/executor_alias.go (new file, +54)
@@ -0,0 +1,54 @@
package wrapper

import (
	"context"

	backend "codeagent-wrapper/internal/backend"
	config "codeagent-wrapper/internal/config"
	executor "codeagent-wrapper/internal/executor"
)

// defaultRunCodexTaskFn is the default implementation of runCodexTaskFn (exposed for test reset).
func defaultRunCodexTaskFn(task TaskSpec, timeout int) TaskResult {
	return executor.DefaultRunCodexTaskFn(task, timeout)
}

var runCodexTaskFn = defaultRunCodexTaskFn

func topologicalSort(tasks []TaskSpec) ([][]TaskSpec, error) {
	return executor.TopologicalSort(tasks)
}

func executeConcurrent(layers [][]TaskSpec, timeout int) []TaskResult {
	maxWorkers := config.ResolveMaxParallelWorkers()
	return executeConcurrentWithContext(context.Background(), layers, timeout, maxWorkers)
}

func executeConcurrentWithContext(parentCtx context.Context, layers [][]TaskSpec, timeout int, maxWorkers int) []TaskResult {
	return executor.ExecuteConcurrentWithContext(parentCtx, layers, timeout, maxWorkers, runCodexTaskFn)
}

func generateFinalOutput(results []TaskResult) string {
	return executor.GenerateFinalOutput(results)
}

func generateFinalOutputWithMode(results []TaskResult, summaryOnly bool) string {
	return executor.GenerateFinalOutputWithMode(results, summaryOnly)
}

func buildCodexArgs(cfg *Config, targetArg string) []string {
	return backend.BuildCodexArgs(cfg, targetArg)
}

func runCodexTask(taskSpec TaskSpec, silent bool, timeoutSec int) TaskResult {
	return runCodexTaskWithContext(context.Background(), taskSpec, nil, nil, false, silent, timeoutSec)
}

func runCodexProcess(parentCtx context.Context, codexArgs []string, taskText string, useStdin bool, timeoutSec int) (message, threadID string, exitCode int) {
	res := runCodexTaskWithContext(parentCtx, TaskSpec{Task: taskText, WorkDir: defaultWorkdir, Mode: "new", UseStdin: useStdin}, nil, codexArgs, true, false, timeoutSec)
	return res.Message, res.SessionID, res.ExitCode
}

func runCodexTaskWithContext(parentCtx context.Context, taskSpec TaskSpec, backend Backend, customArgs []string, useCustomArgs bool, silent bool, timeoutSec int) TaskResult {
	return executor.RunCodexTaskWithContext(parentCtx, taskSpec, backend, codexCommand, buildCodexArgsFn, customArgs, useCustomArgs, silent, timeoutSec)
}
@@ -1,4 +1,4 @@
|
||||
package main
|
||||
package wrapper
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
@@ -10,13 +10,15 @@ import (
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"slices"
|
||||
"strings"
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"syscall"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
executor "codeagent-wrapper/internal/executor"
|
||||
)
|
||||
|
||||
var executorTestTaskCounter atomic.Int64
|
||||
@@ -32,7 +34,12 @@ type execFakeProcess struct {
|
||||
mu sync.Mutex
|
||||
}
|
||||
|
||||
func (p *execFakeProcess) Pid() int { return p.pid }
|
||||
func (p *execFakeProcess) Pid() int {
|
||||
if runtime.GOOS == "windows" {
|
||||
return 0
|
||||
}
|
||||
return p.pid
|
||||
}
|
||||
func (p *execFakeProcess) Kill() error {
|
||||
p.killed.Add(1)
|
||||
return nil
|
||||
@@ -84,7 +91,8 @@ func (rc *reasonReadCloser) record(reason string) {
|
||||
|
||||
type execFakeRunner struct {
|
||||
stdout io.ReadCloser
|
||||
process processHandle
|
||||
stderr io.ReadCloser
|
||||
process executor.ProcessHandle
|
||||
stdin io.WriteCloser
|
||||
dir string
|
||||
env map[string]string
|
||||
@@ -92,6 +100,7 @@ type execFakeRunner struct {
|
||||
waitDelay time.Duration
|
||||
startErr error
|
||||
stdoutErr error
|
||||
stderrErr error
|
||||
stdinErr error
|
||||
allowNilProcess bool
|
||||
started atomic.Bool
|
||||
@@ -119,6 +128,15 @@ func (f *execFakeRunner) StdoutPipe() (io.ReadCloser, error) {
|
||||
}
|
||||
return f.stdout, nil
|
||||
}
|
||||
func (f *execFakeRunner) StderrPipe() (io.ReadCloser, error) {
|
||||
if f.stderrErr != nil {
|
||||
return nil, f.stderrErr
|
||||
}
|
||||
if f.stderr == nil {
|
||||
f.stderr = io.NopCloser(strings.NewReader(""))
|
||||
}
|
||||
return f.stderr, nil
|
||||
}
|
||||
func (f *execFakeRunner) StdinPipe() (io.WriteCloser, error) {
|
||||
if f.stdinErr != nil {
|
||||
return nil, f.stdinErr
|
||||
@@ -141,7 +159,7 @@ func (f *execFakeRunner) SetEnv(env map[string]string) {
|
||||
f.env[k] = v
|
||||
}
|
||||
}
|
||||
func (f *execFakeRunner) Process() processHandle {
|
||||
func (f *execFakeRunner) Process() executor.ProcessHandle {
|
||||
if f.process != nil {
|
||||
return f.process
|
||||
}
|
||||
@@ -151,220 +169,15 @@ func (f *execFakeRunner) Process() processHandle {
|
||||
return &execFakeProcess{pid: 1}
|
||||
}
|
||||
|
||||
func TestExecutorHelperCoverage(t *testing.T) {
|
||||
t.Run("realCmdAndProcess", func(t *testing.T) {
|
||||
rc := &realCmd{}
|
||||
if err := rc.Start(); err == nil {
|
||||
t.Fatalf("expected error for nil command")
|
||||
}
|
||||
if err := rc.Wait(); err == nil {
|
||||
t.Fatalf("expected error for nil command")
|
||||
}
|
||||
if _, err := rc.StdoutPipe(); err == nil {
|
||||
t.Fatalf("expected error for nil command")
|
||||
}
|
||||
if _, err := rc.StdinPipe(); err == nil {
|
||||
t.Fatalf("expected error for nil command")
|
||||
}
|
||||
rc.SetStderr(io.Discard)
|
||||
if rc.Process() != nil {
|
||||
t.Fatalf("expected nil process")
|
||||
}
|
||||
rcWithCmd := &realCmd{cmd: &exec.Cmd{}}
|
||||
rcWithCmd.SetStderr(io.Discard)
|
||||
rcWithCmd.SetDir("/tmp")
|
||||
if rcWithCmd.cmd.Dir != "/tmp" {
|
||||
t.Fatalf("expected SetDir to set cmd.Dir, got %q", rcWithCmd.cmd.Dir)
|
||||
}
|
||||
echoCmd := exec.Command("echo", "ok")
|
||||
rcProc := &realCmd{cmd: echoCmd}
|
||||
stdoutPipe, err := rcProc.StdoutPipe()
|
||||
if err != nil {
|
||||
t.Fatalf("StdoutPipe error: %v", err)
|
||||
}
|
||||
stdinPipe, err := rcProc.StdinPipe()
|
||||
if err != nil {
|
||||
t.Fatalf("StdinPipe error: %v", err)
|
||||
}
|
||||
rcProc.SetStderr(io.Discard)
|
||||
if err := rcProc.Start(); err != nil {
|
||||
t.Fatalf("Start failed: %v", err)
|
||||
}
|
||||
_, _ = stdinPipe.Write([]byte{})
|
||||
_ = stdinPipe.Close()
|
||||
procHandle := rcProc.Process()
|
||||
if procHandle == nil {
|
||||
t.Fatalf("expected process handle")
|
||||
}
|
||||
_ = procHandle.Signal(syscall.SIGTERM)
|
||||
_ = procHandle.Kill()
|
||||
_ = rcProc.Wait()
|
||||
_, _ = io.ReadAll(stdoutPipe)
|
||||
|
||||
rp := &realProcess{}
|
||||
if rp.Pid() != 0 {
|
||||
t.Fatalf("nil process should have pid 0")
|
||||
}
|
||||
if rp.Kill() != nil {
|
||||
t.Fatalf("nil process Kill should be nil")
|
||||
}
|
||||
if rp.Signal(syscall.SIGTERM) != nil {
|
||||
t.Fatalf("nil process Signal should be nil")
|
||||
}
|
||||
rpLive := &realProcess{proc: &os.Process{Pid: 99}}
|
||||
if rpLive.Pid() != 99 {
|
||||
t.Fatalf("expected pid 99, got %d", rpLive.Pid())
|
||||
}
|
||||
_ = rpLive.Kill()
|
||||
_ = rpLive.Signal(syscall.SIGTERM)
|
||||
})
|
||||
|
||||
t.Run("topologicalSortAndSkip", func(t *testing.T) {
|
||||
layers, err := topologicalSort([]TaskSpec{{ID: "root"}, {ID: "child", Dependencies: []string{"root"}}})
|
||||
if err != nil || len(layers) != 2 {
|
||||
t.Fatalf("unexpected topological sort result: layers=%d err=%v", len(layers), err)
|
||||
}
|
||||
if _, err := topologicalSort([]TaskSpec{{ID: "cycle", Dependencies: []string{"cycle"}}}); err == nil {
t.Fatalf("expected cycle detection error")
}

failed := map[string]TaskResult{"root": {ExitCode: 1}}
if skip, _ := shouldSkipTask(TaskSpec{ID: "child", Dependencies: []string{"root"}}, failed); !skip {
t.Fatalf("should skip when dependency failed")
}
if skip, _ := shouldSkipTask(TaskSpec{ID: "leaf"}, failed); skip {
t.Fatalf("should not skip task without dependencies")
}
if skip, _ := shouldSkipTask(TaskSpec{ID: "child-ok", Dependencies: []string{"root"}}, map[string]TaskResult{}); skip {
t.Fatalf("should not skip when dependencies succeeded")
}
})

t.Run("cancelledTaskResult", func(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
cancel()
res := cancelledTaskResult("t1", ctx)
if res.ExitCode != 130 {
t.Fatalf("expected cancel exit code, got %d", res.ExitCode)
}

timeoutCtx, timeoutCancel := context.WithTimeout(context.Background(), 0)
defer timeoutCancel()
res = cancelledTaskResult("t2", timeoutCtx)
if res.ExitCode != 124 {
t.Fatalf("expected timeout exit code, got %d", res.ExitCode)
}
})

t.Run("generateFinalOutputAndArgs", func(t *testing.T) {
const key = "CODEX_BYPASS_SANDBOX"
t.Cleanup(func() { os.Unsetenv(key) })
os.Unsetenv(key)

out := generateFinalOutput([]TaskResult{
{TaskID: "ok", ExitCode: 0},
{TaskID: "fail", ExitCode: 1, Error: "boom"},
})
if !strings.Contains(out, "ok") || !strings.Contains(out, "fail") {
t.Fatalf("unexpected summary output: %s", out)
}
// Test summary mode (default) - should have new format with ### headers
out = generateFinalOutput([]TaskResult{{TaskID: "rich", ExitCode: 0, SessionID: "sess", LogPath: "/tmp/log", Message: "hello"}})
if !strings.Contains(out, "### rich") {
t.Fatalf("summary output missing task header: %s", out)
}
// Test full output mode - should have Session and Message
out = generateFinalOutputWithMode([]TaskResult{{TaskID: "rich", ExitCode: 0, SessionID: "sess", LogPath: "/tmp/log", Message: "hello"}}, false)
if !strings.Contains(out, "Session: sess") || !strings.Contains(out, "Log: /tmp/log") || !strings.Contains(out, "hello") {
t.Fatalf("full output missing fields: %s", out)
}

args := buildCodexArgs(&Config{Mode: "new", WorkDir: "/tmp"}, "task")
if !slices.Equal(args, []string{"e", "--skip-git-repo-check", "-C", "/tmp", "--json", "task"}) {
t.Fatalf("unexpected codex args: %+v", args)
}
args = buildCodexArgs(&Config{Mode: "resume", SessionID: "sess"}, "target")
if !slices.Equal(args, []string{"e", "--skip-git-repo-check", "--json", "resume", "sess", "target"}) {
t.Fatalf("unexpected resume args: %+v", args)
}
})

t.Run("generateFinalOutputASCIIMode", func(t *testing.T) {
t.Setenv("CODEAGENT_ASCII_MODE", "true")

results := []TaskResult{
{TaskID: "ok", ExitCode: 0, Coverage: "92%", CoverageNum: 92, CoverageTarget: 90, KeyOutput: "done"},
{TaskID: "warn", ExitCode: 0, Coverage: "80%", CoverageNum: 80, CoverageTarget: 90, KeyOutput: "did"},
{TaskID: "bad", ExitCode: 2, Error: "boom"},
}
out := generateFinalOutput(results)

for _, sym := range []string{"PASS", "WARN", "FAIL"} {
if !strings.Contains(out, sym) {
t.Fatalf("ASCII mode should include %q, got: %s", sym, out)
}
}
for _, sym := range []string{"✓", "⚠️", "✗"} {
if strings.Contains(out, sym) {
t.Fatalf("ASCII mode should not include %q, got: %s", sym, out)
}
}
})

t.Run("generateFinalOutputUnicodeMode", func(t *testing.T) {
t.Setenv("CODEAGENT_ASCII_MODE", "false")

results := []TaskResult{
{TaskID: "ok", ExitCode: 0, Coverage: "92%", CoverageNum: 92, CoverageTarget: 90, KeyOutput: "done"},
{TaskID: "warn", ExitCode: 0, Coverage: "80%", CoverageNum: 80, CoverageTarget: 90, KeyOutput: "did"},
{TaskID: "bad", ExitCode: 2, Error: "boom"},
}
out := generateFinalOutput(results)

for _, sym := range []string{"✓", "⚠️", "✗"} {
if !strings.Contains(out, sym) {
t.Fatalf("Unicode mode should include %q, got: %s", sym, out)
}
}
})

t.Run("executeConcurrentWrapper", func(t *testing.T) {
orig := runCodexTaskFn
defer func() { runCodexTaskFn = orig }()
runCodexTaskFn = func(task TaskSpec, timeout int) TaskResult {
return TaskResult{TaskID: task.ID, ExitCode: 0, Message: "done"}
}
os.Setenv("CODEAGENT_MAX_PARALLEL_WORKERS", "1")
defer os.Unsetenv("CODEAGENT_MAX_PARALLEL_WORKERS")

results := executeConcurrent([][]TaskSpec{{{ID: "wrap"}}}, 1)
if len(results) != 1 || results[0].TaskID != "wrap" {
t.Fatalf("unexpected wrapper results: %+v", results)
}

unbounded := executeConcurrentWithContext(context.Background(), [][]TaskSpec{{{ID: "unbounded"}}}, 1, 0)
if len(unbounded) != 1 || unbounded[0].ExitCode != 0 {
t.Fatalf("unexpected unbounded result: %+v", unbounded)
}

ctx, cancel := context.WithCancel(context.Background())
cancel()
cancelled := executeConcurrentWithContext(ctx, [][]TaskSpec{{{ID: "cancel"}}}, 1, 1)
if cancelled[0].ExitCode == 0 {
t.Fatalf("expected cancelled result, got %+v", cancelled[0])
}
})
}

func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
origRunner := newCommandRunner
|
||||
defer func() { newCommandRunner = origRunner }()
|
||||
defer resetTestHooks()
|
||||
|
||||
t.Run("resumeMissingSessionID", func(t *testing.T) {
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
t.Fatalf("unexpected command execution for invalid resume config")
|
||||
return nil
|
||||
}
|
||||
})
|
||||
t.Cleanup(func() { executor.SetNewCommandRunner(nil) })
|
||||
|
||||
res := runCodexTaskWithContext(context.Background(), TaskSpec{Task: "payload", WorkDir: ".", Mode: "resume"}, nil, nil, false, false, 1)
|
||||
if res.ExitCode == 0 || !strings.Contains(res.Error, "session_id") {
|
||||
@@ -374,13 +187,14 @@ func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
|
||||
t.Run("success", func(t *testing.T) {
|
||||
var firstStdout *reasonReadCloser
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
rc := newReasonReadCloser(`{"type":"item.completed","item":{"type":"agent_message","text":"hello"}}`)
|
||||
if firstStdout == nil {
|
||||
firstStdout = rc
|
||||
}
|
||||
return &execFakeRunner{stdout: rc, process: &execFakeProcess{pid: 1234}}
|
||||
}
|
||||
})
|
||||
t.Cleanup(func() { executor.SetNewCommandRunner(nil) })
|
||||
|
||||
res := runCodexTaskWithContext(context.Background(), TaskSpec{ID: "task-1", Task: "payload", WorkDir: "."}, nil, nil, false, false, 1)
|
||||
if res.Error != "" || res.Message != "hello" || res.ExitCode != 0 {
|
||||
@@ -410,17 +224,18 @@ func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("startErrors", func(t *testing.T) {
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
return &execFakeRunner{startErr: errors.New("executable file not found"), process: &execFakeProcess{pid: 1}}
|
||||
}
|
||||
})
|
||||
t.Cleanup(func() { executor.SetNewCommandRunner(nil) })
|
||||
res := runCodexTaskWithContext(context.Background(), TaskSpec{Task: "payload", WorkDir: "."}, nil, nil, false, false, 1)
|
||||
if res.ExitCode != 127 {
|
||||
t.Fatalf("expected missing executable exit code, got %d", res.ExitCode)
|
||||
}
|
||||
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
return &execFakeRunner{startErr: errors.New("start failed"), process: &execFakeProcess{pid: 2}}
|
||||
}
|
||||
})
|
||||
res = runCodexTaskWithContext(context.Background(), TaskSpec{Task: "payload", WorkDir: "."}, nil, nil, false, false, 1)
|
||||
if res.ExitCode == 0 {
|
||||
t.Fatalf("expected non-zero exit on start failure")
|
||||
@@ -428,13 +243,14 @@ func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("timeoutAndPipes", func(t *testing.T) {
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
return &execFakeRunner{
|
||||
stdout: newReasonReadCloser(`{"type":"item.completed","item":{"type":"agent_message","text":"slow"}}`),
|
||||
process: &execFakeProcess{pid: 5},
|
||||
waitDelay: 20 * time.Millisecond,
|
||||
}
|
||||
}
|
||||
})
|
||||
t.Cleanup(func() { executor.SetNewCommandRunner(nil) })
|
||||
res := runCodexTaskWithContext(context.Background(), TaskSpec{Task: "payload", WorkDir: ".", UseStdin: true}, nil, nil, false, false, 0)
|
||||
if res.ExitCode == 0 {
|
||||
t.Fatalf("expected timeout result, got %+v", res)
|
||||
@@ -442,17 +258,18 @@ func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("pipeErrors", func(t *testing.T) {
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
return &execFakeRunner{stdoutErr: errors.New("stdout fail"), process: &execFakeProcess{pid: 6}}
|
||||
}
|
||||
})
|
||||
t.Cleanup(func() { executor.SetNewCommandRunner(nil) })
|
||||
res := runCodexTaskWithContext(context.Background(), TaskSpec{Task: "payload", WorkDir: "."}, nil, nil, false, false, 1)
|
||||
if res.ExitCode == 0 {
|
||||
t.Fatalf("expected failure on stdout pipe error")
|
||||
}
|
||||
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
return &execFakeRunner{stdinErr: errors.New("stdin fail"), process: &execFakeProcess{pid: 7}}
|
||||
}
|
||||
})
|
||||
res = runCodexTaskWithContext(context.Background(), TaskSpec{Task: "payload", WorkDir: ".", UseStdin: true}, nil, nil, false, false, 1)
|
||||
if res.ExitCode == 0 {
|
||||
t.Fatalf("expected failure on stdin pipe error")
|
||||
@@ -465,13 +282,14 @@ func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
if exitErr == nil {
|
||||
t.Fatalf("expected exec.ExitError")
|
||||
}
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
return &execFakeRunner{
|
||||
stdout: newReasonReadCloser(`{"type":"item.completed","item":{"type":"agent_message","text":"ignored"}}`),
|
||||
process: &execFakeProcess{pid: 8},
|
||||
waitErr: exitErr,
|
||||
}
|
||||
}
|
||||
})
|
||||
t.Cleanup(func() { executor.SetNewCommandRunner(nil) })
|
||||
res := runCodexTaskWithContext(context.Background(), TaskSpec{Task: "payload", WorkDir: "."}, nil, nil, false, false, 1)
|
||||
if res.ExitCode == 0 {
|
||||
t.Fatalf("expected non-zero exit on wait error")
|
||||
@@ -479,13 +297,14 @@ func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("contextCancelled", func(t *testing.T) {
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
return &execFakeRunner{
|
||||
stdout: newReasonReadCloser(`{"type":"item.completed","item":{"type":"agent_message","text":"cancel"}}`),
|
||||
process: &execFakeProcess{pid: 9},
|
||||
waitDelay: 10 * time.Millisecond,
|
||||
}
|
||||
}
|
||||
})
|
||||
t.Cleanup(func() { executor.SetNewCommandRunner(nil) })
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
cancel()
|
||||
res := runCodexTaskWithContext(ctx, TaskSpec{Task: "payload", WorkDir: "."}, nil, nil, false, false, 1)
|
||||
@@ -495,12 +314,13 @@ func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("silentLogger", func(t *testing.T) {
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
return &execFakeRunner{
|
||||
stdout: newReasonReadCloser(`{"type":"item.completed","item":{"type":"agent_message","text":"quiet"}}`),
|
||||
process: &execFakeProcess{pid: 10},
|
||||
}
|
||||
}
|
||||
})
|
||||
t.Cleanup(func() { executor.SetNewCommandRunner(nil) })
|
||||
_ = closeLogger()
|
||||
res := runCodexTaskWithContext(context.Background(), TaskSpec{Task: "payload", WorkDir: "."}, nil, nil, false, true, 1)
|
||||
if res.ExitCode != 0 || res.LogPath == "" {
|
||||
@@ -510,12 +330,13 @@ func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("injectedLogger", func(t *testing.T) {
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
return &execFakeRunner{
|
||||
stdout: newReasonReadCloser(`{"type":"item.completed","item":{"type":"agent_message","text":"injected"}}`),
|
||||
process: &execFakeProcess{pid: 12},
|
||||
}
|
||||
}
|
||||
})
|
||||
t.Cleanup(func() { executor.SetNewCommandRunner(nil) })
|
||||
_ = closeLogger()
|
||||
|
||||
injected, err := NewLoggerWithSuffix("executor-injected")
|
||||
@@ -527,7 +348,7 @@ func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
_ = os.Remove(injected.Path())
|
||||
}()
|
||||
|
||||
ctx := withTaskLogger(context.Background(), injected)
|
||||
ctx := executor.WithTaskLogger(context.Background(), injected)
|
||||
res := runCodexTaskWithContext(ctx, TaskSpec{ID: "task-injected", Task: "payload", WorkDir: "."}, nil, nil, false, true, 1)
|
||||
if res.ExitCode != 0 || res.LogPath != injected.Path() {
|
||||
t.Fatalf("expected injected logger path, got %+v", res)
|
||||
@@ -547,12 +368,13 @@ func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("contextLoggerWithoutParent", func(t *testing.T) {
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
return &execFakeRunner{
|
||||
stdout: newReasonReadCloser(`{"type":"item.completed","item":{"type":"agent_message","text":"ctx"}}`),
|
||||
process: &execFakeProcess{pid: 14},
|
||||
}
|
||||
}
|
||||
})
|
||||
t.Cleanup(func() { executor.SetNewCommandRunner(nil) })
|
||||
_ = closeLogger()
|
||||
|
||||
taskLogger, err := NewLoggerWithSuffix("executor-taskctx")
|
||||
@@ -564,8 +386,8 @@ func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
_ = os.Remove(taskLogger.Path())
|
||||
})
|
||||
|
||||
ctx := withTaskLogger(context.Background(), taskLogger)
|
||||
res := runCodexTaskWithContext(nil, TaskSpec{ID: "task-context", Task: "payload", WorkDir: ".", Context: ctx}, nil, nil, false, true, 1)
|
||||
ctx := executor.WithTaskLogger(context.Background(), taskLogger)
|
||||
res := runCodexTaskWithContext(context.TODO(), TaskSpec{ID: "task-context", Task: "payload", WorkDir: ".", Context: ctx}, nil, nil, false, true, 1)
|
||||
if res.ExitCode != 0 || res.LogPath != taskLogger.Path() {
|
||||
t.Fatalf("expected task logger to be reused from spec context, got %+v", res)
|
||||
}
|
||||
@@ -585,16 +407,17 @@ func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
|
||||
t.Run("backendSetsDirAndNilContext", func(t *testing.T) {
|
||||
var rc *execFakeRunner
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
rc = &execFakeRunner{
|
||||
stdout: newReasonReadCloser(`{"type":"item.completed","item":{"type":"agent_message","text":"backend"}}`),
|
||||
process: &execFakeProcess{pid: 13},
|
||||
}
|
||||
return rc
|
||||
}
|
||||
})
|
||||
t.Cleanup(func() { executor.SetNewCommandRunner(nil) })
|
||||
|
||||
_ = closeLogger()
|
||||
res := runCodexTaskWithContext(nil, TaskSpec{ID: "task-backend", Task: "payload", WorkDir: "/tmp"}, ClaudeBackend{}, nil, false, false, 1)
|
||||
res := runCodexTaskWithContext(context.TODO(), TaskSpec{ID: "task-backend", Task: "payload", WorkDir: "/tmp"}, ClaudeBackend{}, nil, false, false, 1)
|
||||
if res.ExitCode != 0 || res.Message != "backend" {
|
||||
t.Fatalf("unexpected result: %+v", res)
|
||||
}
|
||||
@@ -603,13 +426,36 @@ func TestExecutorRunCodexTaskWithContext(t *testing.T) {
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("claudeSkipPermissionsPropagatesFromTaskSpec", func(t *testing.T) {
|
||||
t.Setenv("CODEAGENT_SKIP_PERMISSIONS", "false")
|
||||
var gotArgs []string
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
gotArgs = append([]string(nil), args...)
|
||||
return &execFakeRunner{
|
||||
stdout: newReasonReadCloser(`{"type":"item.completed","item":{"type":"agent_message","text":"ok"}}`),
|
||||
process: &execFakeProcess{pid: 15},
|
||||
}
|
||||
})
|
||||
t.Cleanup(func() { executor.SetNewCommandRunner(nil) })
|
||||
|
||||
_ = closeLogger()
|
||||
res := runCodexTaskWithContext(context.Background(), TaskSpec{ID: "task-skip", Task: "payload", WorkDir: ".", SkipPermissions: true}, ClaudeBackend{}, nil, false, false, 1)
|
||||
if res.ExitCode != 0 || res.Error != "" {
|
||||
t.Fatalf("unexpected result: %+v", res)
|
||||
}
|
||||
if !slices.Contains(gotArgs, "--dangerously-skip-permissions") {
|
||||
t.Fatalf("expected --dangerously-skip-permissions in args, got %v", gotArgs)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("missingMessage", func(t *testing.T) {
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
executor.SetNewCommandRunner(func(ctx context.Context, name string, args ...string) executor.CommandRunner {
|
||||
return &execFakeRunner{
|
||||
stdout: newReasonReadCloser(`{"type":"item.completed","item":{"type":"task","text":"noop"}}`),
|
||||
process: &execFakeProcess{pid: 11},
|
||||
}
|
||||
}
|
||||
})
|
||||
t.Cleanup(func() { executor.SetNewCommandRunner(nil) })
|
||||
res := runCodexTaskWithContext(context.Background(), TaskSpec{Task: "payload", WorkDir: "."}, nil, nil, false, false, 1)
|
||||
if res.ExitCode == 0 {
|
||||
t.Fatalf("expected failure when no agent_message returned")
|
||||
@@ -635,7 +481,7 @@ func TestExecutorParallelLogIsolation(t *testing.T) {
|
||||
|
||||
origRun := runCodexTaskFn
|
||||
runCodexTaskFn = func(task TaskSpec, timeout int) TaskResult {
|
||||
logger := taskLoggerFromContext(task.Context)
|
||||
logger := executor.TaskLoggerFromContext(task.Context)
|
||||
if logger == nil {
|
||||
return TaskResult{TaskID: task.ID, ExitCode: 1, Error: "missing task logger"}
|
||||
}
|
||||
@@ -659,7 +505,7 @@ func TestExecutorParallelLogIsolation(t *testing.T) {
|
||||
os.Stderr = stderrW
|
||||
defer func() { os.Stderr = oldStderr }()
|
||||
|
||||
results := executeConcurrentWithContext(nil, [][]TaskSpec{{{ID: taskA}, {ID: taskB}}}, 1, -1)
|
||||
results := executeConcurrentWithContext(context.TODO(), [][]TaskSpec{{{ID: taskA}, {ID: taskB}}}, 1, -1)
|
||||
|
||||
_ = stderrW.Close()
|
||||
os.Stderr = oldStderr
|
||||
@@ -725,7 +571,7 @@ func TestConcurrentExecutorParallelLogIsolationAndClosure(t *testing.T) {
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
|
||||
oldArgs := os.Args
|
||||
os.Args = []string{defaultWrapperName}
|
||||
os.Args = []string{wrapperName}
|
||||
t.Cleanup(func() { os.Args = oldArgs })
|
||||
|
||||
mainLogger, err := NewLoggerWithSuffix("concurrent-main")
|
||||
@@ -771,7 +617,7 @@ func TestConcurrentExecutorParallelLogIsolationAndClosure(t *testing.T) {
|
||||
runCodexTaskFn = func(task TaskSpec, timeout int) TaskResult {
|
||||
readyCh <- struct{}{}
|
||||
|
||||
logger := taskLoggerFromContext(task.Context)
|
||||
logger := executor.TaskLoggerFromContext(task.Context)
|
||||
loggerCh <- taskLoggerInfo{taskID: task.ID, logger: logger}
|
||||
if logger == nil {
|
||||
return TaskResult{TaskID: task.ID, ExitCode: 1, Error: "missing task logger"}
|
||||
@@ -858,15 +704,9 @@ func TestConcurrentExecutorParallelLogIsolationAndClosure(t *testing.T) {
|
||||
}
|
||||
|
||||
for taskID, logger := range loggers {
|
||||
if !logger.closed.Load() {
|
||||
if !logger.IsClosed() {
|
||||
t.Fatalf("expected task logger to be closed for %q", taskID)
|
||||
}
|
||||
if logger.file == nil {
|
||||
t.Fatalf("expected task logger file to be non-nil for %q", taskID)
|
||||
}
|
||||
if _, err := logger.file.Write([]byte("x")); err == nil {
|
||||
t.Fatalf("expected task logger file to be closed for %q", taskID)
|
||||
}
|
||||
}
|
||||
|
||||
mainLogger.Flush()
|
||||
@@ -936,10 +776,10 @@ func parseTaskIDFromLogLine(line string) (string, bool) {
|
||||
}
|
||||
|
||||
func TestExecutorTaskLoggerContext(t *testing.T) {
|
||||
if taskLoggerFromContext(nil) != nil {
|
||||
t.Fatalf("expected nil logger from nil context")
|
||||
if executor.TaskLoggerFromContext(context.TODO()) != nil {
|
||||
t.Fatalf("expected nil logger from TODO context")
|
||||
}
|
||||
if taskLoggerFromContext(context.Background()) != nil {
|
||||
if executor.TaskLoggerFromContext(context.Background()) != nil {
|
||||
t.Fatalf("expected nil logger when context has no logger")
|
||||
}
|
||||
|
||||
@@ -952,12 +792,12 @@ func TestExecutorTaskLoggerContext(t *testing.T) {
|
||||
_ = os.Remove(logger.Path())
|
||||
}()
|
||||
|
||||
ctx := withTaskLogger(context.Background(), logger)
|
||||
if got := taskLoggerFromContext(ctx); got != logger {
|
||||
ctx := executor.WithTaskLogger(context.Background(), logger)
|
||||
if got := executor.TaskLoggerFromContext(ctx); got != logger {
|
||||
t.Fatalf("expected logger roundtrip, got %v", got)
|
||||
}
|
||||
|
||||
if taskLoggerFromContext(withTaskLogger(context.Background(), nil)) != nil {
|
||||
if executor.TaskLoggerFromContext(executor.WithTaskLogger(context.Background(), nil)) != nil {
|
||||
t.Fatalf("expected nil logger when injected logger is nil")
|
||||
}
|
||||
}
|
||||
@@ -1114,7 +954,7 @@ func TestExecutorExecuteConcurrentWithContextBranches(t *testing.T) {
|
||||
|
||||
orig := runCodexTaskFn
|
||||
runCodexTaskFn = func(task TaskSpec, timeout int) TaskResult {
|
||||
logger := taskLoggerFromContext(task.Context)
|
||||
logger := executor.TaskLoggerFromContext(task.Context)
|
||||
if logger != mainLogger {
|
||||
return TaskResult{TaskID: task.ID, ExitCode: 1, Error: "unexpected logger"}
|
||||
}
|
||||
@@ -1148,9 +988,6 @@ func TestExecutorExecuteConcurrentWithContextBranches(t *testing.T) {
|
||||
if res.LogPath != mainLogger.Path() {
|
||||
t.Fatalf("shared log path mismatch: got %q want %q", res.LogPath, mainLogger.Path())
|
||||
}
|
||||
if !res.sharedLog {
|
||||
t.Fatalf("expected sharedLog flag for %+v", res)
|
||||
}
|
||||
if !strings.Contains(stderrOut, "Log (shared)") {
|
||||
t.Fatalf("stderr missing shared marker: %s", stderrOut)
|
||||
}
|
||||
@@ -1179,7 +1016,7 @@ func TestExecutorExecuteConcurrentWithContextBranches(t *testing.T) {
|
||||
|
||||
orig := runCodexTaskFn
|
||||
runCodexTaskFn = func(task TaskSpec, timeout int) TaskResult {
|
||||
logger := taskLoggerFromContext(task.Context)
|
||||
logger := executor.TaskLoggerFromContext(task.Context)
|
||||
if logger == nil {
|
||||
return TaskResult{TaskID: task.ID, ExitCode: 1, Error: "missing logger"}
|
||||
}
|
||||
@@ -1217,7 +1054,14 @@ func TestExecutorExecuteConcurrentWithContextBranches(t *testing.T) {
|
||||
if err != nil {
|
||||
t.Fatalf("failed to read log %q: %v", res.LogPath, err)
|
||||
}
|
||||
if !strings.Contains(string(data), "TASK="+res.TaskID) {
|
||||
found := false
|
||||
for _, line := range strings.Split(string(data), "\n") {
|
||||
if strings.Contains(stripTimestampPrefix(line), "TASK="+res.TaskID) {
|
||||
found = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !found {
|
||||
t.Fatalf("log for %q missing task marker, content: %s", res.TaskID, string(data))
|
||||
}
|
||||
_ = os.Remove(res.LogPath)
|
||||
@@ -1225,147 +1069,6 @@ func TestExecutorExecuteConcurrentWithContextBranches(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
func TestExecutorSignalAndTermination(t *testing.T) {
|
||||
forceKillDelay.Store(0)
|
||||
defer forceKillDelay.Store(5)
|
||||
|
||||
proc := &execFakeProcess{pid: 42}
|
||||
cmd := &execFakeRunner{process: proc}
|
||||
|
||||
origNotify := signalNotifyFn
|
||||
origStop := signalStopFn
|
||||
defer func() {
|
||||
signalNotifyFn = origNotify
|
||||
signalStopFn = origStop
|
||||
}()
|
||||
|
||||
signalNotifyFn = func(c chan<- os.Signal, sigs ...os.Signal) {
|
||||
go func() { c <- syscall.SIGINT }()
|
||||
}
|
||||
signalStopFn = func(c chan<- os.Signal) {}
|
||||
|
||||
forwardSignals(context.Background(), cmd, func(string) {})
|
||||
time.Sleep(20 * time.Millisecond)
|
||||
|
||||
proc.mu.Lock()
|
||||
signalled := len(proc.signals)
|
||||
proc.mu.Unlock()
|
||||
if signalled == 0 {
|
||||
t.Fatalf("process did not receive signal")
|
||||
}
|
||||
if proc.killed.Load() == 0 {
|
||||
t.Fatalf("process was not killed after signal")
|
||||
}
|
||||
|
||||
timer := terminateProcess(cmd)
|
||||
if timer == nil {
|
||||
t.Fatalf("terminateProcess returned nil timer")
|
||||
}
|
||||
timer.Stop()
|
||||
|
||||
ft := terminateCommand(cmd)
|
||||
if ft == nil {
|
||||
t.Fatalf("terminateCommand returned nil")
|
||||
}
|
||||
ft.Stop()
|
||||
|
||||
cmdKill := &execFakeRunner{process: &execFakeProcess{pid: 50}}
|
||||
ftKill := terminateCommand(cmdKill)
|
||||
time.Sleep(10 * time.Millisecond)
|
||||
if p, ok := cmdKill.process.(*execFakeProcess); ok && p.killed.Load() == 0 {
|
||||
t.Fatalf("terminateCommand did not kill process")
|
||||
}
|
||||
ftKill.Stop()
|
||||
|
||||
cmdKill2 := &execFakeRunner{process: &execFakeProcess{pid: 51}}
|
||||
timer2 := terminateProcess(cmdKill2)
|
||||
time.Sleep(10 * time.Millisecond)
|
||||
if p, ok := cmdKill2.process.(*execFakeProcess); ok && p.killed.Load() == 0 {
|
||||
t.Fatalf("terminateProcess did not kill process")
|
||||
}
|
||||
timer2.Stop()
|
||||
|
||||
if terminateCommand(nil) != nil {
|
||||
t.Fatalf("terminateCommand should return nil for nil cmd")
|
||||
}
|
||||
if terminateCommand(&execFakeRunner{allowNilProcess: true}) != nil {
|
||||
t.Fatalf("terminateCommand should return nil when process is nil")
|
||||
}
|
||||
if terminateProcess(nil) != nil {
|
||||
t.Fatalf("terminateProcess should return nil for nil cmd")
|
||||
}
|
||||
if terminateProcess(&execFakeRunner{allowNilProcess: true}) != nil {
|
||||
t.Fatalf("terminateProcess should return nil when process is nil")
|
||||
}
|
||||
|
||||
signalNotifyFn = func(c chan<- os.Signal, sigs ...os.Signal) {}
|
||||
ctxDone, cancelDone := context.WithCancel(context.Background())
|
||||
cancelDone()
|
||||
forwardSignals(ctxDone, &execFakeRunner{process: &execFakeProcess{pid: 70}}, func(string) {})
|
||||
}
|
||||
|
||||
func TestExecutorCancelReasonAndCloseWithReason(t *testing.T) {
if reason := cancelReason("", nil); !strings.Contains(reason, "Context") {
t.Fatalf("unexpected cancelReason for nil ctx: %s", reason)
}
ctx, cancel := context.WithTimeout(context.Background(), 0)
defer cancel()
if !strings.Contains(cancelReason("cmd", ctx), "timeout") {
t.Fatalf("expected timeout reason")
}
cancelCtx, cancelFn := context.WithCancel(context.Background())
cancelFn()
if !strings.Contains(cancelReason("cmd", cancelCtx), "Execution cancelled") {
t.Fatalf("expected cancellation reason")
}
if !strings.Contains(cancelReason("", cancelCtx), "codex") {
t.Fatalf("expected default command name in cancel reason")
}

rc := &reasonReadCloser{r: strings.NewReader("data"), closedC: make(chan struct{}, 1)}
closeWithReason(rc, "why")
select {
case <-rc.closedC:
default:
t.Fatalf("CloseWithReason was not called")
}

plain := io.NopCloser(strings.NewReader("x"))
closeWithReason(plain, "noop")
closeWithReason(nil, "noop")
}

func TestExecutorForceKillTimerStop(t *testing.T) {
done := make(chan struct{}, 1)
ft := &forceKillTimer{timer: time.AfterFunc(50*time.Millisecond, func() { done <- struct{}{} }), done: done}
ft.Stop()

done2 := make(chan struct{}, 1)
ft2 := &forceKillTimer{timer: time.AfterFunc(0, func() { done2 <- struct{}{} }), done: done2}
time.Sleep(10 * time.Millisecond)
ft2.Stop()

var nilTimer *forceKillTimer
nilTimer.Stop()
(&forceKillTimer{}).Stop()
}

func TestExecutorForwardSignalsDefaults(t *testing.T) {
origNotify := signalNotifyFn
origStop := signalStopFn
signalNotifyFn = nil
signalStopFn = nil
defer func() {
signalNotifyFn = origNotify
signalStopFn = origStop
}()

ctx, cancel := context.WithCancel(context.Background())
cancel()
forwardSignals(ctx, &execFakeRunner{process: &execFakeProcess{pid: 80}}, func(string) {})
time.Sleep(10 * time.Millisecond)
}

func TestExecutorSharedLogFalseWhenCustomLogPath(t *testing.T) {
devNull, err := os.OpenFile(os.DevNull, os.O_WRONLY, 0)
if err != nil {
@@ -1421,10 +1124,9 @@ func TestExecutorSharedLogFalseWhenCustomLogPath(t *testing.T) {
}

res := results[0]
// Key assertion: even though handle.shared=true (because task logger creation failed),
// sharedLog should be false because LogPath differs from the main logger's path
if res.sharedLog {
t.Fatalf("expected sharedLog=false when LogPath differs from shared logger, got true")
out := generateFinalOutputWithMode(results, false)
if strings.Contains(out, "(shared)") {
t.Fatalf("did not expect shared marker when LogPath differs from shared logger, got: %s", out)
}

// Verify that LogPath really is the custom one

codeagent-wrapper/internal/app/logger.go (new file, 26 lines)
@@ -0,0 +1,26 @@
package wrapper

import ilogger "codeagent-wrapper/internal/logger"

type Logger = ilogger.Logger
type CleanupStats = ilogger.CleanupStats

func NewLogger() (*Logger, error) { return ilogger.NewLogger() }

func NewLoggerWithSuffix(suffix string) (*Logger, error) { return ilogger.NewLoggerWithSuffix(suffix) }

func setLogger(l *Logger) { ilogger.SetLogger(l) }

func closeLogger() error { return ilogger.CloseLogger() }

func activeLogger() *Logger { return ilogger.ActiveLogger() }

func logInfo(msg string) { ilogger.LogInfo(msg) }

func logWarn(msg string) { ilogger.LogWarn(msg) }

func logError(msg string) { ilogger.LogError(msg) }

func cleanupOldLogs() (CleanupStats, error) { return ilogger.CleanupOldLogs() }

func sanitizeLogSuffix(raw string) string { return ilogger.SanitizeLogSuffix(raw) }
@@ -1,7 +1,8 @@
|
||||
package main
|
||||
package wrapper
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"codeagent-wrapper/internal/logger"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
@@ -36,7 +37,9 @@ func captureStdout(t *testing.T, fn func()) string {
|
||||
os.Stdout = old
|
||||
|
||||
var buf bytes.Buffer
|
||||
io.Copy(&buf, r)
|
||||
if _, err := io.Copy(&buf, r); err != nil {
|
||||
t.Fatalf("io.Copy() error = %v", err)
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
@@ -57,11 +60,17 @@ func parseIntegrationOutput(t *testing.T, out string) integrationOutput {
|
||||
for _, p := range parts {
|
||||
p = strings.TrimSpace(p)
|
||||
if strings.HasSuffix(p, "tasks") {
|
||||
fmt.Sscanf(p, "%d tasks", &payload.Summary.Total)
|
||||
if _, err := fmt.Sscanf(p, "%d tasks", &payload.Summary.Total); err != nil {
|
||||
t.Fatalf("failed to parse total tasks from %q: %v", p, err)
|
||||
}
|
||||
} else if strings.HasSuffix(p, "passed") {
|
||||
fmt.Sscanf(p, "%d passed", &payload.Summary.Success)
|
||||
if _, err := fmt.Sscanf(p, "%d passed", &payload.Summary.Success); err != nil {
|
||||
t.Fatalf("failed to parse passed tasks from %q: %v", p, err)
|
||||
}
|
||||
} else if strings.HasSuffix(p, "failed") {
|
||||
fmt.Sscanf(p, "%d failed", &payload.Summary.Failed)
|
||||
if _, err := fmt.Sscanf(p, "%d failed", &payload.Summary.Failed); err != nil {
|
||||
t.Fatalf("failed to parse failed tasks from %q: %v", p, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if strings.HasPrefix(line, "Total:") {
|
||||
@@ -70,11 +79,17 @@ func parseIntegrationOutput(t *testing.T, out string) integrationOutput {
|
||||
for _, p := range parts {
|
||||
p = strings.TrimSpace(p)
|
||||
if strings.HasPrefix(p, "Total:") {
|
||||
fmt.Sscanf(p, "Total: %d", &payload.Summary.Total)
|
||||
if _, err := fmt.Sscanf(p, "Total: %d", &payload.Summary.Total); err != nil {
|
||||
t.Fatalf("failed to parse total tasks from %q: %v", p, err)
|
||||
}
|
||||
} else if strings.HasPrefix(p, "Success:") {
|
||||
fmt.Sscanf(p, "Success: %d", &payload.Summary.Success)
|
||||
if _, err := fmt.Sscanf(p, "Success: %d", &payload.Summary.Success); err != nil {
|
||||
t.Fatalf("failed to parse passed tasks from %q: %v", p, err)
|
||||
}
|
||||
} else if strings.HasPrefix(p, "Failed:") {
|
||||
fmt.Sscanf(p, "Failed: %d", &payload.Summary.Failed)
|
||||
if _, err := fmt.Sscanf(p, "Failed: %d", &payload.Summary.Failed); err != nil {
|
||||
t.Fatalf("failed to parse failed tasks from %q: %v", p, err)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if line == "## Task Results" {
|
||||
@@ -94,34 +109,39 @@ func parseIntegrationOutput(t *testing.T, out string) integrationOutput {
|
||||
currentTask = &TaskResult{}
|
||||
|
||||
taskLine := strings.TrimPrefix(line, "### ")
|
||||
success, warning, failed := getStatusSymbols()
|
||||
// Parse different formats
|
||||
if strings.Contains(taskLine, " "+success) {
|
||||
parts := strings.Split(taskLine, " "+success)
|
||||
parseMarker := func(marker string, exitCode int) bool {
|
||||
needle := " " + marker
|
||||
if !strings.Contains(taskLine, needle) {
|
||||
return false
|
||||
}
|
||||
parts := strings.Split(taskLine, needle)
|
||||
currentTask.TaskID = strings.TrimSpace(parts[0])
|
||||
currentTask.ExitCode = 0
|
||||
// Extract coverage if present
|
||||
if len(parts) > 1 {
|
||||
currentTask.ExitCode = exitCode
|
||||
if exitCode == 0 && len(parts) > 1 {
|
||||
coveragePart := strings.TrimSpace(parts[1])
|
||||
if strings.HasSuffix(coveragePart, "%") {
|
||||
currentTask.Coverage = coveragePart
|
||||
}
|
||||
}
|
||||
} else if strings.Contains(taskLine, " "+warning) {
|
||||
parts := strings.Split(taskLine, " "+warning)
|
||||
currentTask.TaskID = strings.TrimSpace(parts[0])
|
||||
currentTask.ExitCode = 0
|
||||
} else if strings.Contains(taskLine, " "+failed) {
|
||||
parts := strings.Split(taskLine, " "+failed)
|
||||
currentTask.TaskID = strings.TrimSpace(parts[0])
|
||||
currentTask.ExitCode = 1
|
||||
} else {
|
||||
return true
|
||||
}
|
||||
|
||||
switch {
|
||||
case parseMarker("✓", 0), parseMarker("PASS", 0):
|
||||
// ok
|
||||
case parseMarker("⚠️", 0), parseMarker("WARN", 0):
|
||||
// warning
|
||||
case parseMarker("✗", 1), parseMarker("FAIL", 1):
|
||||
// fail
|
||||
default:
|
||||
currentTask.TaskID = taskLine
|
||||
}
|
||||
} else if currentTask != nil && inTaskResults {
|
||||
// Parse task details
|
||||
if strings.HasPrefix(line, "Exit code:") {
|
||||
fmt.Sscanf(line, "Exit code: %d", ¤tTask.ExitCode)
|
||||
if _, err := fmt.Sscanf(line, "Exit code: %d", ¤tTask.ExitCode); err != nil {
|
||||
t.Fatalf("failed to parse exit code from %q: %v", line, err)
|
||||
}
|
||||
} else if strings.HasPrefix(line, "Error:") {
|
||||
currentTask.Error = strings.TrimPrefix(line, "Error: ")
|
||||
} else if strings.HasPrefix(line, "Log:") {
|
||||
@@ -147,7 +167,9 @@ func parseIntegrationOutput(t *testing.T, out string) integrationOutput {
|
||||
currentTask.ExitCode = 0
|
||||
} else if strings.HasPrefix(line, "Status: FAILED") {
|
||||
if strings.Contains(line, "exit code") {
|
||||
fmt.Sscanf(line, "Status: FAILED (exit code %d)", ¤tTask.ExitCode)
|
||||
if _, err := fmt.Sscanf(line, "Status: FAILED (exit code %d)", ¤tTask.ExitCode); err != nil {
|
||||
t.Fatalf("failed to parse exit code from %q: %v", line, err)
|
||||
}
|
||||
} else {
|
||||
currentTask.ExitCode = 1
|
||||
}
|
||||
@@ -169,32 +191,6 @@ func parseIntegrationOutput(t *testing.T, out string) integrationOutput {
|
||||
return payload
|
||||
}
|
||||
|
||||
func extractTaskBlock(t *testing.T, output, taskID string) string {
|
||||
t.Helper()
|
||||
header := fmt.Sprintf("--- Task: %s ---", taskID)
|
||||
lines := strings.Split(output, "\n")
|
||||
var block []string
|
||||
collecting := false
|
||||
for _, raw := range lines {
|
||||
trimmed := strings.TrimSpace(raw)
|
||||
if !collecting {
|
||||
if trimmed == header {
|
||||
collecting = true
|
||||
block = append(block, trimmed)
|
||||
}
|
||||
continue
|
||||
}
|
||||
if strings.HasPrefix(trimmed, "--- Task: ") && trimmed != header {
|
||||
break
|
||||
}
|
||||
block = append(block, trimmed)
|
||||
}
|
||||
if len(block) == 0 {
|
||||
t.Fatalf("task block %s not found in output:\n%s", taskID, output)
|
||||
}
|
||||
return strings.Join(block, "\n")
|
||||
}
|
||||
|
||||
func findResultByID(t *testing.T, payload integrationOutput, id string) TaskResult {
|
||||
t.Helper()
|
||||
for _, res := range payload.Results {
|
||||
@@ -206,6 +202,37 @@ func findResultByID(t *testing.T, payload integrationOutput, id string) TaskResu
|
||||
return TaskResult{}
|
||||
}
|
||||
|
||||
func setTempDirEnv(t *testing.T, dir string) string {
|
||||
t.Helper()
|
||||
resolved := dir
|
||||
if eval, err := filepath.EvalSymlinks(dir); err == nil {
|
||||
resolved = eval
|
||||
}
|
||||
t.Setenv("TMPDIR", resolved)
|
||||
t.Setenv("TEMP", resolved)
|
||||
t.Setenv("TMP", resolved)
|
||||
return resolved
|
||||
}
|
||||
|
||||
func createTempLog(t *testing.T, dir, name string) string {
|
||||
t.Helper()
|
||||
path := filepath.Join(dir, name)
|
||||
if err := os.WriteFile(path, []byte("test"), 0o644); err != nil {
|
||||
t.Fatalf("failed to create temp log %s: %v", path, err)
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
func stubProcessRunning(t *testing.T, fn func(int) bool) {
|
||||
t.Helper()
|
||||
t.Cleanup(logger.SetProcessRunningCheck(fn))
|
||||
}
|
||||
|
||||
func stubProcessStartTime(t *testing.T, fn func(int) time.Time) {
|
||||
t.Helper()
|
||||
t.Cleanup(logger.SetProcessStartTimeFn(fn))
|
||||
}
|
||||
|
||||
func TestRunParallelEndToEnd_OrderAndConcurrency(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
origRun := runCodexTaskFn
|
||||
@@ -391,7 +418,7 @@ id: beta
|
||||
---CONTENT---
|
||||
task-beta`
|
||||
stdinReader = bytes.NewReader([]byte(input))
|
||||
os.Args = []string{"codex-wrapper", "--parallel"}
|
||||
os.Args = []string{"codeagent-wrapper", "--parallel"}
|
||||
|
||||
var exitCode int
|
||||
output := captureStdout(t, func() {
|
||||
@@ -444,9 +471,9 @@ id: d
|
||||
---CONTENT---
|
||||
ok-d`
|
||||
stdinReader = bytes.NewReader([]byte(input))
|
||||
os.Args = []string{"codex-wrapper", "--parallel"}
|
||||
os.Args = []string{"codeagent-wrapper", "--parallel"}
|
||||
|
||||
expectedLog := filepath.Join(tempDir, fmt.Sprintf("codex-wrapper-%d.log", os.Getpid()))
|
||||
expectedLog := filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", os.Getpid()))
|
||||
|
||||
origRun := runCodexTaskFn
|
||||
runCodexTaskFn = func(task TaskSpec, timeout int) TaskResult {
|
||||
@@ -500,9 +527,9 @@ ok-d`
|
||||
|
||||
// After parallel log isolation fix, each task has its own log file
|
||||
expectedLines := map[string]struct{}{
|
||||
fmt.Sprintf("Task a: Log: %s", filepath.Join(tempDir, fmt.Sprintf("codex-wrapper-%d-a.log", os.Getpid()))): {},
|
||||
fmt.Sprintf("Task b: Log: %s", filepath.Join(tempDir, fmt.Sprintf("codex-wrapper-%d-b.log", os.Getpid()))): {},
|
||||
fmt.Sprintf("Task d: Log: %s", filepath.Join(tempDir, fmt.Sprintf("codex-wrapper-%d-d.log", os.Getpid()))): {},
|
||||
fmt.Sprintf("Task a: Log: %s", filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d-a.log", os.Getpid()))): {},
|
||||
fmt.Sprintf("Task b: Log: %s", filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d-b.log", os.Getpid()))): {},
|
||||
fmt.Sprintf("Task d: Log: %s", filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d-d.log", os.Getpid()))): {},
|
||||
}
|
||||
|
||||
if len(taskLines) != len(expectedLines) {
|
||||
@@ -520,7 +547,7 @@ func TestRunNonParallelOutputsIncludeLogPathsIntegration(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
os.Args = []string{"codex-wrapper", "integration-log-check"}
|
||||
os.Args = []string{"codeagent-wrapper", "integration-log-check"}
|
||||
stdinReader = strings.NewReader("")
|
||||
isTerminalFn = func() bool { return true }
|
||||
codexCommand = "echo"
|
||||
@@ -538,7 +565,7 @@ func TestRunNonParallelOutputsIncludeLogPathsIntegration(t *testing.T) {
|
||||
if exitCode != 0 {
|
||||
t.Fatalf("run() exit=%d, want 0", exitCode)
|
||||
}
|
||||
expectedLog := filepath.Join(tempDir, fmt.Sprintf("codex-wrapper-%d.log", os.Getpid()))
|
||||
expectedLog := filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", os.Getpid()))
|
||||
wantLine := fmt.Sprintf("Log: %s", expectedLog)
|
||||
if !strings.Contains(stderr, wantLine) {
|
||||
t.Fatalf("stderr missing %q, got: %q", wantLine, stderr)
|
||||
@@ -641,7 +668,6 @@ func TestRunParallelTimeoutPropagation(t *testing.T) {
|
||||
t.Cleanup(func() {
|
||||
runCodexTaskFn = origRun
|
||||
resetTestHooks()
|
||||
os.Unsetenv("CODEX_TIMEOUT")
|
||||
})
|
||||
|
||||
var receivedTimeout int
|
||||
@@ -650,7 +676,7 @@ func TestRunParallelTimeoutPropagation(t *testing.T) {
|
||||
return TaskResult{TaskID: task.ID, ExitCode: 124, Error: "timeout"}
|
||||
}
|
||||
|
||||
os.Setenv("CODEX_TIMEOUT", "1")
|
||||
t.Setenv("CODEX_TIMEOUT", "1")
|
||||
input := `---TASK---
|
||||
id: T
|
||||
---CONTENT---
|
||||
@@ -720,11 +746,11 @@ func TestRunStartupCleanupRemovesOrphansEndToEnd(t *testing.T) {
|
||||
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
|
||||
orphanA := createTempLog(t, tempDir, "codex-wrapper-5001.log")
|
||||
orphanB := createTempLog(t, tempDir, "codex-wrapper-5002-extra.log")
|
||||
orphanC := createTempLog(t, tempDir, "codex-wrapper-5003-suffix.log")
|
||||
orphanA := createTempLog(t, tempDir, "codeagent-wrapper-5001.log")
|
||||
orphanB := createTempLog(t, tempDir, "codeagent-wrapper-5002-extra.log")
|
||||
orphanC := createTempLog(t, tempDir, "codeagent-wrapper-5003-suffix.log")
|
||||
runningPID := 81234
|
||||
runningLog := createTempLog(t, tempDir, fmt.Sprintf("codex-wrapper-%d.log", runningPID))
|
||||
runningLog := createTempLog(t, tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", runningPID))
|
||||
unrelated := createTempLog(t, tempDir, "wrapper.log")
|
||||
|
||||
stubProcessRunning(t, func(pid int) bool {
|
||||
@@ -740,7 +766,7 @@ func TestRunStartupCleanupRemovesOrphansEndToEnd(t *testing.T) {
|
||||
codexCommand = createFakeCodexScript(t, "tid-startup", "ok")
|
||||
stdinReader = strings.NewReader("")
|
||||
isTerminalFn = func() bool { return true }
|
||||
os.Args = []string{"codex-wrapper", "task"}
|
||||
os.Args = []string{"codeagent-wrapper", "task"}
|
||||
|
||||
if exit := run(); exit != 0 {
|
||||
t.Fatalf("run() exit=%d, want 0", exit)
|
||||
@@ -766,7 +792,7 @@ func TestRunStartupCleanupConcurrentWrappers(t *testing.T) {
|
||||
|
||||
const totalLogs = 40
|
||||
for i := 0; i < totalLogs; i++ {
|
||||
createTempLog(t, tempDir, fmt.Sprintf("codex-wrapper-%d.log", 9000+i))
|
||||
createTempLog(t, tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", 9000+i))
|
||||
}
|
||||
|
||||
stubProcessRunning(t, func(pid int) bool {
|
||||
@@ -790,7 +816,7 @@ func TestRunStartupCleanupConcurrentWrappers(t *testing.T) {
|
||||
close(start)
|
||||
wg.Wait()
|
||||
|
||||
matches, err := filepath.Glob(filepath.Join(tempDir, "codex-wrapper-*.log"))
|
||||
matches, err := filepath.Glob(filepath.Join(tempDir, "codeagent-wrapper-*.log"))
|
||||
if err != nil {
|
||||
t.Fatalf("glob error: %v", err)
|
||||
}
|
||||
@@ -804,9 +830,9 @@ func TestRunCleanupFlagEndToEnd_Success(t *testing.T) {
|
||||
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
|
||||
staleA := createTempLog(t, tempDir, "codex-wrapper-2100.log")
|
||||
staleB := createTempLog(t, tempDir, "codex-wrapper-2200-extra.log")
|
||||
keeper := createTempLog(t, tempDir, "codex-wrapper-2300.log")
|
||||
staleA := createTempLog(t, tempDir, "codeagent-wrapper-2100.log")
|
||||
staleB := createTempLog(t, tempDir, "codeagent-wrapper-2200-extra.log")
|
||||
keeper := createTempLog(t, tempDir, "codeagent-wrapper-2300.log")
|
||||
|
||||
stubProcessRunning(t, func(pid int) bool {
|
||||
return pid == 2300 || pid == os.Getpid()
|
||||
@@ -818,7 +844,7 @@ func TestRunCleanupFlagEndToEnd_Success(t *testing.T) {
|
||||
return time.Time{}
|
||||
})
|
||||
|
||||
os.Args = []string{"codex-wrapper", "--cleanup"}
|
||||
os.Args = []string{"codeagent-wrapper", "--cleanup"}
|
||||
|
||||
var exitCode int
|
||||
output := captureStdout(t, func() {
|
||||
@@ -842,10 +868,10 @@ func TestRunCleanupFlagEndToEnd_Success(t *testing.T) {
|
||||
if !strings.Contains(output, "Files kept: 1") {
|
||||
t.Fatalf("missing 'Files kept: 1' in output: %q", output)
|
||||
}
|
||||
if !strings.Contains(output, "codex-wrapper-2100.log") || !strings.Contains(output, "codex-wrapper-2200-extra.log") {
|
||||
if !strings.Contains(output, "codeagent-wrapper-2100.log") || !strings.Contains(output, "codeagent-wrapper-2200-extra.log") {
|
||||
t.Fatalf("missing deleted file names in output: %q", output)
|
||||
}
|
||||
if !strings.Contains(output, "codex-wrapper-2300.log") {
|
||||
if !strings.Contains(output, "codeagent-wrapper-2300.log") {
|
||||
t.Fatalf("missing kept file names in output: %q", output)
|
||||
}
|
||||
|
||||
@@ -858,7 +884,7 @@ func TestRunCleanupFlagEndToEnd_Success(t *testing.T) {
|
||||
t.Fatalf("expected kept log to remain, err=%v", err)
|
||||
}
|
||||
|
||||
currentLog := filepath.Join(tempDir, fmt.Sprintf("codex-wrapper-%d.log", os.Getpid()))
|
||||
currentLog := filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", os.Getpid()))
|
||||
if _, err := os.Stat(currentLog); err == nil {
|
||||
t.Fatalf("cleanup mode should not create new log file %s", currentLog)
|
||||
} else if !os.IsNotExist(err) {
|
||||
@@ -877,7 +903,7 @@ func TestRunCleanupFlagEndToEnd_FailureDoesNotAffectStartup(t *testing.T) {
|
||||
return CleanupStats{Scanned: 1}, fmt.Errorf("permission denied")
|
||||
}
|
||||
|
||||
os.Args = []string{"codex-wrapper", "--cleanup"}
|
||||
os.Args = []string{"codeagent-wrapper", "--cleanup"}
|
||||
|
||||
var exitCode int
|
||||
errOutput := captureStderr(t, func() {
|
||||
@@ -894,7 +920,7 @@ func TestRunCleanupFlagEndToEnd_FailureDoesNotAffectStartup(t *testing.T) {
|
||||
t.Fatalf("cleanup called %d times, want 1", calls)
|
||||
}
|
||||
|
||||
currentLog := filepath.Join(tempDir, fmt.Sprintf("codex-wrapper-%d.log", os.Getpid()))
|
||||
currentLog := filepath.Join(tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", os.Getpid()))
|
||||
if _, err := os.Stat(currentLog); err == nil {
|
||||
t.Fatalf("cleanup failure should not create new log file %s", currentLog)
|
||||
} else if !os.IsNotExist(err) {
|
||||
@@ -907,7 +933,7 @@ func TestRunCleanupFlagEndToEnd_FailureDoesNotAffectStartup(t *testing.T) {
|
||||
codexCommand = createFakeCodexScript(t, "tid-cleanup-e2e", "ok")
|
||||
stdinReader = strings.NewReader("")
|
||||
isTerminalFn = func() bool { return true }
|
||||
os.Args = []string{"codex-wrapper", "post-cleanup task"}
|
||||
os.Args = []string{"codeagent-wrapper", "post-cleanup task"}
|
||||
|
||||
var normalExit int
|
||||
normalOutput := captureStdout(t, func() {
|
||||
File diff suppressed because it is too large

codeagent-wrapper/internal/app/parallel_config.go (new file, 9 lines)
@@ -0,0 +1,9 @@
package wrapper

import (
executor "codeagent-wrapper/internal/executor"
)

func parseParallelConfig(data []byte) (*ParallelConfig, error) {
return executor.ParseParallelConfig(data)
}

codeagent-wrapper/internal/app/parser.go (new file, 34 lines)
@@ -0,0 +1,34 @@
package wrapper

import (
"bufio"
"io"

parser "codeagent-wrapper/internal/parser"

"github.com/goccy/go-json"
)

func parseJSONStream(r io.Reader) (message, threadID string) {
return parseJSONStreamWithLog(r, logWarn, logInfo)
}

func parseJSONStreamWithWarn(r io.Reader, warnFn func(string)) (message, threadID string) {
return parseJSONStreamWithLog(r, warnFn, logInfo)
}

func parseJSONStreamWithLog(r io.Reader, warnFn func(string), infoFn func(string)) (message, threadID string) {
return parseJSONStreamInternal(r, warnFn, infoFn, nil, nil)
}

func parseJSONStreamInternal(r io.Reader, warnFn func(string), infoFn func(string), onMessage func(), onComplete func()) (message, threadID string) {
return parser.ParseJSONStreamInternal(r, warnFn, infoFn, onMessage, onComplete)
}

func hasKey(m map[string]json.RawMessage, key string) bool { return parser.HasKey(m, key) }

func discardInvalidJSON(decoder *json.Decoder, reader *bufio.Reader) (*bufio.Reader, error) {
return parser.DiscardInvalidJSON(decoder, reader)
}

func normalizeText(text interface{}) string { return parser.NormalizeText(text) }

codeagent-wrapper/internal/app/task_types.go (new file, 8 lines)
@@ -0,0 +1,8 @@
package wrapper

import executor "codeagent-wrapper/internal/executor"

// Type aliases to keep existing names in the wrapper package.
type ParallelConfig = executor.ParallelConfig
type TaskSpec = executor.TaskSpec
type TaskResult = executor.TaskResult

codeagent-wrapper/internal/app/terminal_test.go (new file, 30 lines)
@@ -0,0 +1,30 @@
package wrapper

import (
"os"
"testing"
)

func TestDefaultIsTerminalCoverage(t *testing.T) {
oldStdin := os.Stdin
t.Cleanup(func() { os.Stdin = oldStdin })

f, err := os.CreateTemp(t.TempDir(), "stdin-*")
if err != nil {
t.Fatalf("os.CreateTemp() error = %v", err)
}
defer os.Remove(f.Name())

os.Stdin = f
if got := defaultIsTerminal(); got {
t.Fatalf("defaultIsTerminal() = %v, want false for regular file", got)
}

if err := f.Close(); err != nil {
t.Fatalf("Close() error = %v", err)
}
os.Stdin = f
if got := defaultIsTerminal(); !got {
t.Fatalf("defaultIsTerminal() = %v, want true when Stat fails", got)
}
}

@@ -1,4 +1,4 @@
|
||||
package main
|
||||
package wrapper
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
@@ -7,6 +7,8 @@ import (
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
utils "codeagent-wrapper/internal/utils"
|
||||
)
|
||||
|
||||
func resolveTimeout() int {
|
||||
@@ -52,7 +54,7 @@ func shouldUseStdin(taskText string, piped bool) bool {
|
||||
if len(taskText) > 800 {
|
||||
return true
|
||||
}
|
||||
return strings.IndexAny(taskText, stdinSpecialChars) >= 0
|
||||
return strings.ContainsAny(taskText, stdinSpecialChars)
|
||||
}
|
||||
|
||||
func defaultIsTerminal() bool {
|
||||
@@ -196,69 +198,21 @@ func (b *tailBuffer) String() string {
|
||||
}
|
||||
|
||||
func truncate(s string, maxLen int) string {
|
||||
if len(s) <= maxLen {
|
||||
return s
|
||||
}
|
||||
if maxLen < 0 {
|
||||
return ""
|
||||
}
|
||||
return s[:maxLen] + "..."
|
||||
return utils.Truncate(s, maxLen)
|
||||
}
|
||||
|
||||
// safeTruncate safely truncates string to maxLen, avoiding panic and UTF-8 corruption.
|
||||
func safeTruncate(s string, maxLen int) string {
|
||||
if maxLen <= 0 || s == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
runes := []rune(s)
|
||||
if len(runes) <= maxLen {
|
||||
return s
|
||||
}
|
||||
|
||||
if maxLen < 4 {
|
||||
return string(runes[:1])
|
||||
}
|
||||
|
||||
cutoff := maxLen - 3
|
||||
if cutoff <= 0 {
|
||||
return string(runes[:1])
|
||||
}
|
||||
if len(runes) <= cutoff {
|
||||
return s
|
||||
}
|
||||
return string(runes[:cutoff]) + "..."
|
||||
return utils.SafeTruncate(s, maxLen)
|
||||
}
|
||||
|
||||
// sanitizeOutput removes ANSI escape sequences and control characters.
|
||||
func sanitizeOutput(s string) string {
|
||||
var result strings.Builder
|
||||
inEscape := false
|
||||
for i := 0; i < len(s); i++ {
|
||||
if s[i] == '\x1b' && i+1 < len(s) && s[i+1] == '[' {
|
||||
inEscape = true
|
||||
i++ // skip '['
|
||||
continue
|
||||
}
|
||||
if inEscape {
|
||||
if (s[i] >= 'A' && s[i] <= 'Z') || (s[i] >= 'a' && s[i] <= 'z') {
|
||||
inEscape = false
|
||||
}
|
||||
continue
|
||||
}
|
||||
// Keep printable chars and common whitespace.
|
||||
if s[i] >= 32 || s[i] == '\n' || s[i] == '\t' {
|
||||
result.WriteByte(s[i])
|
||||
}
|
||||
}
|
||||
return result.String()
|
||||
return utils.SanitizeOutput(s)
|
||||
}
|
||||
|
||||
func min(a, b int) int {
|
||||
if a < b {
|
||||
return a
|
||||
}
|
||||
return b
|
||||
return utils.Min(a, b)
|
||||
}
|
||||
|
||||
func hello() string {
|
||||
@@ -273,30 +227,6 @@ func farewell(name string) string {
|
||||
return "goodbye " + name
|
||||
}
|
||||
|
||||
// extractMessageSummary extracts a brief summary from task output
|
||||
// Returns first meaningful line or truncated content up to maxLen chars
|
||||
func extractMessageSummary(message string, maxLen int) string {
|
||||
if message == "" || maxLen <= 0 {
|
||||
return ""
|
||||
}
|
||||
|
||||
// Try to find a meaningful summary line
|
||||
lines := strings.Split(message, "\n")
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
// Skip empty lines and common noise
|
||||
if line == "" || strings.HasPrefix(line, "```") || strings.HasPrefix(line, "---") {
|
||||
continue
|
||||
}
|
||||
// Found a meaningful line
|
||||
return safeTruncate(line, maxLen)
|
||||
}
|
||||
|
||||
// Fallback: truncate entire message
|
||||
clean := strings.TrimSpace(message)
|
||||
return safeTruncate(clean, maxLen)
|
||||
}
|
||||
|
||||
// extractCoverageFromLines extracts coverage from pre-split lines.
|
||||
func extractCoverageFromLines(lines []string) string {
|
||||
if len(lines) == 0 {
|
||||
@@ -405,7 +335,7 @@ func extractFilesChangedFromLines(lines []string) []string {
|
||||
for _, prefix := range []string{"Modified:", "Created:", "Updated:", "Edited:", "Wrote:", "Changed:"} {
|
||||
if strings.HasPrefix(line, prefix) {
|
||||
file := strings.TrimSpace(strings.TrimPrefix(line, prefix))
|
||||
file = strings.Trim(file, "`,\"'()[],:")
|
||||
file = strings.Trim(file, "`\"'()[],:")
|
||||
file = strings.TrimPrefix(file, "@")
|
||||
if file != "" && !seen[file] {
|
||||
files = append(files, file)
|
||||
@@ -422,7 +352,7 @@ func extractFilesChangedFromLines(lines []string) []string {
|
||||
// Pattern 2: Tokens that look like file paths (allow root files, strip @ prefix).
|
||||
parts := strings.Fields(line)
|
||||
for _, part := range parts {
|
||||
part = strings.Trim(part, "`,\"'()[],:")
|
||||
part = strings.Trim(part, "`\"'()[],:")
|
||||
part = strings.TrimPrefix(part, "@")
|
||||
for _, ext := range exts {
|
||||
if strings.HasSuffix(part, ext) && !seen[part] {
|
||||
@@ -591,125 +521,3 @@ func extractKeyOutputFromLines(lines []string, maxLen int) string {
|
||||
clean := strings.TrimSpace(strings.Join(lines, "\n"))
|
||||
return safeTruncate(clean, maxLen)
|
||||
}
|
||||
|
||||
// extractKeyOutput extracts a brief summary of what the task accomplished
|
||||
// Looks for summary lines, first meaningful sentence, or truncates message
|
||||
func extractKeyOutput(message string, maxLen int) string {
|
||||
if message == "" || maxLen <= 0 {
|
||||
return ""
|
||||
}
|
||||
return extractKeyOutputFromLines(strings.Split(message, "\n"), maxLen)
|
||||
}
|
||||
|
||||
// extractCoverageGap extracts what's missing from coverage reports
|
||||
// Looks for uncovered lines, branches, or functions
|
||||
func extractCoverageGap(message string) string {
|
||||
if message == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
lower := strings.ToLower(message)
|
||||
lines := strings.Split(message, "\n")
|
||||
|
||||
// Look for uncovered/missing patterns
|
||||
for _, line := range lines {
|
||||
lineLower := strings.ToLower(line)
|
||||
line = strings.TrimSpace(line)
|
||||
|
||||
// Common patterns for uncovered code
|
||||
if strings.Contains(lineLower, "uncovered") ||
|
||||
strings.Contains(lineLower, "not covered") ||
|
||||
strings.Contains(lineLower, "missing coverage") ||
|
||||
strings.Contains(lineLower, "lines not covered") {
|
||||
if len(line) > 100 {
|
||||
return line[:97] + "..."
|
||||
}
|
||||
return line
|
||||
}
|
||||
|
||||
// Look for specific file:line patterns in coverage reports
|
||||
if strings.Contains(lineLower, "branch") && strings.Contains(lineLower, "not taken") {
|
||||
if len(line) > 100 {
|
||||
return line[:97] + "..."
|
||||
}
|
||||
return line
|
||||
}
|
||||
}
|
||||
|
||||
// Look for function names that aren't covered
|
||||
if strings.Contains(lower, "function") && strings.Contains(lower, "0%") {
|
||||
for _, line := range lines {
|
||||
if strings.Contains(strings.ToLower(line), "0%") && strings.Contains(line, "function") {
|
||||
line = strings.TrimSpace(line)
|
||||
if len(line) > 100 {
|
||||
return line[:97] + "..."
|
||||
}
|
||||
return line
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
// extractErrorDetail extracts meaningful error context from task output
|
||||
// Returns the most relevant error information up to maxLen characters
|
||||
func extractErrorDetail(message string, maxLen int) string {
|
||||
if message == "" || maxLen <= 0 {
|
||||
return ""
|
||||
}
|
||||
|
||||
lines := strings.Split(message, "\n")
|
||||
var errorLines []string
|
||||
|
||||
// Look for error-related lines
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
lower := strings.ToLower(line)
|
||||
|
||||
// Skip noise lines
|
||||
if strings.HasPrefix(line, "at ") && strings.Contains(line, "(") {
|
||||
// Stack trace line - only keep first one
|
||||
if len(errorLines) > 0 && strings.HasPrefix(strings.ToLower(errorLines[len(errorLines)-1]), "at ") {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
// Prioritize error/fail lines
|
||||
if strings.Contains(lower, "error") ||
|
||||
strings.Contains(lower, "fail") ||
|
||||
strings.Contains(lower, "exception") ||
|
||||
strings.Contains(lower, "assert") ||
|
||||
strings.Contains(lower, "expected") ||
|
||||
strings.Contains(lower, "timeout") ||
|
||||
strings.Contains(lower, "not found") ||
|
||||
strings.Contains(lower, "cannot") ||
|
||||
strings.Contains(lower, "undefined") ||
|
||||
strings.HasPrefix(line, "FAIL") ||
|
||||
strings.HasPrefix(line, "●") {
|
||||
errorLines = append(errorLines, line)
|
||||
}
|
||||
}
|
||||
|
||||
if len(errorLines) == 0 {
|
||||
// No specific error lines found, take last few lines
|
||||
start := len(lines) - 5
|
||||
if start < 0 {
|
||||
start = 0
|
||||
}
|
||||
for _, line := range lines[start:] {
|
||||
line = strings.TrimSpace(line)
|
||||
if line != "" {
|
||||
errorLines = append(errorLines, line)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Join and truncate
|
||||
result := strings.Join(errorLines, " | ")
|
||||
return safeTruncate(result, maxLen)
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
package main
|
||||
package wrapper
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
9
codeagent-wrapper/internal/app/wrapper_name.go
Normal file
@@ -0,0 +1,9 @@
package wrapper

import ilogger "codeagent-wrapper/internal/logger"

const wrapperName = ilogger.WrapperName

func currentWrapperName() string { return ilogger.CurrentWrapperName() }

func primaryLogPrefix() string { return ilogger.PrimaryLogPrefix() }
33
codeagent-wrapper/internal/backend/backend.go
Normal file
@@ -0,0 +1,33 @@
package backend

import config "codeagent-wrapper/internal/config"

// Backend defines the contract for invoking different AI CLI backends.
// Each backend is responsible for supplying the executable command and
// building the argument list based on the wrapper config.
type Backend interface {
    Name() string
    BuildArgs(cfg *config.Config, targetArg string) []string
    Command() string
    Env(baseURL, apiKey string) map[string]string
}

var (
    logWarnFn  = func(string) {}
    logErrorFn = func(string) {}
)

// SetLogFuncs configures optional logging hooks used by some backends.
// Callers can safely pass nil to disable the hook.
func SetLogFuncs(warnFn, errorFn func(string)) {
    if warnFn != nil {
        logWarnFn = warnFn
    } else {
        logWarnFn = func(string) {}
    }
    if errorFn != nil {
        logErrorFn = errorFn
    } else {
        logErrorFn = func(string) {}
    }
}
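To make the Backend contract above concrete, here is a minimal sketch of an additional backend; the EchoBackend type and its behaviour are invented purely for illustration and are not part of this change set. A real backend would also need an entry in the registry map (see registry.go below) so that Select can find it.

package backend

import config "codeagent-wrapper/internal/config"

// EchoBackend is a hypothetical example backend used purely for illustration.
type EchoBackend struct{}

func (EchoBackend) Name() string    { return "echo" }
func (EchoBackend) Command() string { return "echo" }

// Env returns no extra environment variables for this example backend.
func (EchoBackend) Env(baseURL, apiKey string) map[string]string { return nil }

// BuildArgs simply echoes the task text back, honoring stdin mode ("-").
func (EchoBackend) BuildArgs(cfg *config.Config, targetArg string) []string {
    if targetArg == "-" {
        return []string{"reading task from stdin"}
    }
    return []string{targetArg}
}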
@@ -1,4 +1,4 @@
|
||||
package main
|
||||
package backend
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
@@ -6,13 +6,16 @@ import (
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
config "codeagent-wrapper/internal/config"
|
||||
)
|
||||
|
||||
func TestClaudeBuildArgs_ModesAndPermissions(t *testing.T) {
|
||||
backend := ClaudeBackend{}
|
||||
|
||||
t.Run("new mode omits skip-permissions by default", func(t *testing.T) {
|
||||
cfg := &Config{Mode: "new", WorkDir: "/repo"}
|
||||
t.Run("new mode omits skip-permissions when env disabled", func(t *testing.T) {
|
||||
t.Setenv("CODEAGENT_SKIP_PERMISSIONS", "false")
|
||||
cfg := &config.Config{Mode: "new", WorkDir: "/repo"}
|
||||
got := backend.BuildArgs(cfg, "todo")
|
||||
want := []string{"-p", "--setting-sources", "", "--output-format", "stream-json", "--verbose", "todo"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
@@ -20,8 +23,8 @@ func TestClaudeBuildArgs_ModesAndPermissions(t *testing.T) {
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("new mode can opt-in skip-permissions", func(t *testing.T) {
|
||||
cfg := &Config{Mode: "new", SkipPermissions: true}
|
||||
t.Run("new mode includes skip-permissions by default", func(t *testing.T) {
|
||||
cfg := &config.Config{Mode: "new", SkipPermissions: false}
|
||||
got := backend.BuildArgs(cfg, "-")
|
||||
want := []string{"-p", "--dangerously-skip-permissions", "--setting-sources", "", "--output-format", "stream-json", "--verbose", "-"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
@@ -30,7 +33,8 @@ func TestClaudeBuildArgs_ModesAndPermissions(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("resume mode includes session id", func(t *testing.T) {
|
||||
cfg := &Config{Mode: "resume", SessionID: "sid-123", WorkDir: "/ignored"}
|
||||
t.Setenv("CODEAGENT_SKIP_PERMISSIONS", "false")
|
||||
cfg := &config.Config{Mode: "resume", SessionID: "sid-123", WorkDir: "/ignored"}
|
||||
got := backend.BuildArgs(cfg, "resume-task")
|
||||
want := []string{"-p", "--setting-sources", "", "-r", "sid-123", "--output-format", "stream-json", "--verbose", "resume-task"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
@@ -39,7 +43,8 @@ func TestClaudeBuildArgs_ModesAndPermissions(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("resume mode without session still returns base flags", func(t *testing.T) {
|
||||
cfg := &Config{Mode: "resume", WorkDir: "/ignored"}
|
||||
t.Setenv("CODEAGENT_SKIP_PERMISSIONS", "false")
|
||||
cfg := &config.Config{Mode: "resume", WorkDir: "/ignored"}
|
||||
got := backend.BuildArgs(cfg, "follow-up")
|
||||
want := []string{"-p", "--setting-sources", "", "--output-format", "stream-json", "--verbose", "follow-up"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
@@ -48,7 +53,7 @@ func TestClaudeBuildArgs_ModesAndPermissions(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("resume mode can opt-in skip permissions", func(t *testing.T) {
|
||||
cfg := &Config{Mode: "resume", SessionID: "sid-123", SkipPermissions: true}
|
||||
cfg := &config.Config{Mode: "resume", SessionID: "sid-123", SkipPermissions: true}
|
||||
got := backend.BuildArgs(cfg, "resume-task")
|
||||
want := []string{"-p", "--dangerously-skip-permissions", "--setting-sources", "", "-r", "sid-123", "--output-format", "stream-json", "--verbose", "resume-task"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
@@ -63,12 +68,48 @@ func TestClaudeBuildArgs_ModesAndPermissions(t *testing.T) {
|
||||
})
|
||||
}
|
||||
|
||||
func TestBackendBuildArgs_Model(t *testing.T) {
|
||||
t.Run("claude includes --model when set", func(t *testing.T) {
|
||||
t.Setenv("CODEAGENT_SKIP_PERMISSIONS", "false")
|
||||
backend := ClaudeBackend{}
|
||||
cfg := &config.Config{Mode: "new", Model: "opus"}
|
||||
got := backend.BuildArgs(cfg, "todo")
|
||||
want := []string{"-p", "--setting-sources", "", "--model", "opus", "--output-format", "stream-json", "--verbose", "todo"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("got %v, want %v", got, want)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("gemini includes -m when set", func(t *testing.T) {
|
||||
backend := GeminiBackend{}
|
||||
cfg := &config.Config{Mode: "new", Model: "gemini-3-pro-preview"}
|
||||
got := backend.BuildArgs(cfg, "task")
|
||||
want := []string{"-o", "stream-json", "-y", "-m", "gemini-3-pro-preview", "task"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("got %v, want %v", got, want)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("codex includes --model when set", func(t *testing.T) {
|
||||
const key = "CODEX_BYPASS_SANDBOX"
|
||||
t.Setenv(key, "false")
|
||||
|
||||
backend := CodexBackend{}
|
||||
cfg := &config.Config{Mode: "new", WorkDir: "/tmp", Model: "o3"}
|
||||
got := backend.BuildArgs(cfg, "task")
|
||||
want := []string{"e", "--model", "o3", "--skip-git-repo-check", "-C", "/tmp", "--json", "task"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("got %v, want %v", got, want)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestClaudeBuildArgs_GeminiAndCodexModes(t *testing.T) {
|
||||
t.Run("gemini new mode defaults workdir", func(t *testing.T) {
|
||||
backend := GeminiBackend{}
|
||||
cfg := &Config{Mode: "new", WorkDir: "/workspace"}
|
||||
cfg := &config.Config{Mode: "new", WorkDir: "/workspace"}
|
||||
got := backend.BuildArgs(cfg, "task")
|
||||
want := []string{"-o", "stream-json", "-y", "-p", "task"}
|
||||
want := []string{"-o", "stream-json", "-y", "task"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("got %v, want %v", got, want)
|
||||
}
|
||||
@@ -76,9 +117,9 @@ func TestClaudeBuildArgs_GeminiAndCodexModes(t *testing.T) {
|
||||
|
||||
t.Run("gemini resume mode uses session id", func(t *testing.T) {
|
||||
backend := GeminiBackend{}
|
||||
cfg := &Config{Mode: "resume", SessionID: "sid-999"}
|
||||
cfg := &config.Config{Mode: "resume", SessionID: "sid-999"}
|
||||
got := backend.BuildArgs(cfg, "resume")
|
||||
want := []string{"-o", "stream-json", "-y", "-r", "sid-999", "-p", "resume"}
|
||||
want := []string{"-o", "stream-json", "-y", "-r", "sid-999", "resume"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("got %v, want %v", got, want)
|
||||
}
|
||||
@@ -86,9 +127,9 @@ func TestClaudeBuildArgs_GeminiAndCodexModes(t *testing.T) {
|
||||
|
||||
t.Run("gemini resume mode without session omits identifier", func(t *testing.T) {
|
||||
backend := GeminiBackend{}
|
||||
cfg := &Config{Mode: "resume"}
|
||||
cfg := &config.Config{Mode: "resume"}
|
||||
got := backend.BuildArgs(cfg, "resume")
|
||||
want := []string{"-o", "stream-json", "-y", "-p", "resume"}
|
||||
want := []string{"-o", "stream-json", "-y", "resume"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("got %v, want %v", got, want)
|
||||
}
|
||||
@@ -101,13 +142,22 @@ func TestClaudeBuildArgs_GeminiAndCodexModes(t *testing.T) {
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("gemini stdin mode uses -p flag", func(t *testing.T) {
|
||||
backend := GeminiBackend{}
|
||||
cfg := &config.Config{Mode: "new"}
|
||||
got := backend.BuildArgs(cfg, "-")
|
||||
want := []string{"-o", "stream-json", "-y", "-p", "-"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Fatalf("got %v, want %v", got, want)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("codex build args omits bypass flag by default", func(t *testing.T) {
|
||||
const key = "CODEX_BYPASS_SANDBOX"
|
||||
t.Cleanup(func() { os.Unsetenv(key) })
|
||||
os.Unsetenv(key)
|
||||
t.Setenv(key, "false")
|
||||
|
||||
backend := CodexBackend{}
|
||||
cfg := &Config{Mode: "new", WorkDir: "/tmp"}
|
||||
cfg := &config.Config{Mode: "new", WorkDir: "/tmp"}
|
||||
got := backend.BuildArgs(cfg, "task")
|
||||
want := []string{"e", "--skip-git-repo-check", "-C", "/tmp", "--json", "task"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
@@ -117,11 +167,10 @@ func TestClaudeBuildArgs_GeminiAndCodexModes(t *testing.T) {
|
||||
|
||||
t.Run("codex build args includes bypass flag when enabled", func(t *testing.T) {
|
||||
const key = "CODEX_BYPASS_SANDBOX"
|
||||
t.Cleanup(func() { os.Unsetenv(key) })
|
||||
os.Setenv(key, "true")
|
||||
t.Setenv(key, "true")
|
||||
|
||||
backend := CodexBackend{}
|
||||
cfg := &Config{Mode: "new", WorkDir: "/tmp"}
|
||||
cfg := &config.Config{Mode: "new", WorkDir: "/tmp"}
|
||||
got := backend.BuildArgs(cfg, "task")
|
||||
want := []string{"e", "--dangerously-bypass-approvals-and-sandbox", "--skip-git-repo-check", "-C", "/tmp", "--json", "task"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
@@ -157,7 +206,7 @@ func TestLoadMinimalEnvSettings(t *testing.T) {
|
||||
t.Setenv("USERPROFILE", home)
|
||||
|
||||
t.Run("missing file returns empty", func(t *testing.T) {
|
||||
if got := loadMinimalEnvSettings(); len(got) != 0 {
|
||||
if got := LoadMinimalEnvSettings(); len(got) != 0 {
|
||||
t.Fatalf("got %v, want empty", got)
|
||||
}
|
||||
})
|
||||
@@ -173,7 +222,7 @@ func TestLoadMinimalEnvSettings(t *testing.T) {
|
||||
t.Fatalf("WriteFile: %v", err)
|
||||
}
|
||||
|
||||
got := loadMinimalEnvSettings()
|
||||
got := LoadMinimalEnvSettings()
|
||||
if got["ANTHROPIC_API_KEY"] != "secret" || got["FOO"] != "bar" {
|
||||
t.Fatalf("got %v, want keys present", got)
|
||||
}
|
||||
@@ -187,7 +236,7 @@ func TestLoadMinimalEnvSettings(t *testing.T) {
|
||||
t.Fatalf("WriteFile: %v", err)
|
||||
}
|
||||
|
||||
got := loadMinimalEnvSettings()
|
||||
got := LoadMinimalEnvSettings()
|
||||
if got["GOOD"] != "ok" {
|
||||
t.Fatalf("got %v, want GOOD=ok", got)
|
||||
}
|
||||
@@ -202,12 +251,72 @@ func TestLoadMinimalEnvSettings(t *testing.T) {
|
||||
t.Run("oversized file returns empty", func(t *testing.T) {
|
||||
dir := filepath.Join(home, ".claude")
|
||||
path := filepath.Join(dir, "settings.json")
|
||||
data := bytes.Repeat([]byte("a"), maxClaudeSettingsBytes+1)
|
||||
data := bytes.Repeat([]byte("a"), MaxClaudeSettingsBytes+1)
|
||||
if err := os.WriteFile(path, data, 0o600); err != nil {
|
||||
t.Fatalf("WriteFile: %v", err)
|
||||
}
|
||||
if got := loadMinimalEnvSettings(); len(got) != 0 {
|
||||
if got := LoadMinimalEnvSettings(); len(got) != 0 {
|
||||
t.Fatalf("got %v, want empty", got)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestOpencodeBackend_BuildArgs(t *testing.T) {
|
||||
backend := OpencodeBackend{}
|
||||
|
||||
t.Run("basic", func(t *testing.T) {
|
||||
cfg := &config.Config{Mode: "new"}
|
||||
got := backend.BuildArgs(cfg, "hello")
|
||||
want := []string{"run", "--format", "json", "hello"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("got %v, want %v", got, want)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("with model", func(t *testing.T) {
|
||||
cfg := &config.Config{Mode: "new", Model: "opencode/grok-code"}
|
||||
got := backend.BuildArgs(cfg, "task")
|
||||
want := []string{"run", "-m", "opencode/grok-code", "--format", "json", "task"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("got %v, want %v", got, want)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("resume mode", func(t *testing.T) {
|
||||
cfg := &config.Config{Mode: "resume", SessionID: "ses_123", Model: "opencode/grok-code"}
|
||||
got := backend.BuildArgs(cfg, "follow-up")
|
||||
want := []string{"run", "-m", "opencode/grok-code", "-s", "ses_123", "--format", "json", "follow-up"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("got %v, want %v", got, want)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("resume without session", func(t *testing.T) {
|
||||
cfg := &config.Config{Mode: "resume"}
|
||||
got := backend.BuildArgs(cfg, "task")
|
||||
want := []string{"run", "--format", "json", "task"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("got %v, want %v", got, want)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("stdin mode omits dash", func(t *testing.T) {
|
||||
cfg := &config.Config{Mode: "new"}
|
||||
got := backend.BuildArgs(cfg, "-")
|
||||
want := []string{"run", "--format", "json"}
|
||||
if !reflect.DeepEqual(got, want) {
|
||||
t.Errorf("got %v, want %v", got, want)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestOpencodeBackend_Interface(t *testing.T) {
|
||||
backend := OpencodeBackend{}
|
||||
|
||||
if backend.Name() != "opencode" {
|
||||
t.Errorf("Name() = %q, want %q", backend.Name(), "opencode")
|
||||
}
|
||||
if backend.Command() != "opencode" {
|
||||
t.Errorf("Command() = %q, want %q", backend.Command(), "opencode")
|
||||
}
|
||||
}
|
||||
139
codeagent-wrapper/internal/backend/claude.go
Normal file
@@ -0,0 +1,139 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
config "codeagent-wrapper/internal/config"
|
||||
|
||||
"github.com/goccy/go-json"
|
||||
)
|
||||
|
||||
type ClaudeBackend struct{}
|
||||
|
||||
func (ClaudeBackend) Name() string { return "claude" }
|
||||
func (ClaudeBackend) Command() string { return "claude" }
|
||||
func (ClaudeBackend) Env(baseURL, apiKey string) map[string]string {
|
||||
baseURL = strings.TrimSpace(baseURL)
|
||||
apiKey = strings.TrimSpace(apiKey)
|
||||
if baseURL == "" && apiKey == "" {
|
||||
return nil
|
||||
}
|
||||
env := make(map[string]string, 2)
|
||||
if baseURL != "" {
|
||||
env["ANTHROPIC_BASE_URL"] = baseURL
|
||||
}
|
||||
if apiKey != "" {
|
||||
env["ANTHROPIC_API_KEY"] = apiKey
|
||||
}
|
||||
return env
|
||||
}
|
||||
func (ClaudeBackend) BuildArgs(cfg *config.Config, targetArg string) []string {
|
||||
return buildClaudeArgs(cfg, targetArg)
|
||||
}
|
||||
|
||||
const MaxClaudeSettingsBytes = 1 << 20 // 1MB
|
||||
|
||||
type MinimalClaudeSettings struct {
|
||||
Env map[string]string
|
||||
Model string
|
||||
}
|
||||
|
||||
// LoadMinimalClaudeSettings extracts only a safe, minimal subset from ~/.claude/settings.json:
// - env: only string-typed values are accepted
// - model: only a string-typed value is accepted
// A missing file, a parse failure, or an oversized file all return an empty result.
|
||||
func LoadMinimalClaudeSettings() MinimalClaudeSettings {
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil || home == "" {
|
||||
return MinimalClaudeSettings{}
|
||||
}
|
||||
|
||||
claudeDir := filepath.Clean(filepath.Join(home, ".claude"))
|
||||
settingPath := filepath.Clean(filepath.Join(claudeDir, "settings.json"))
|
||||
rel, err := filepath.Rel(claudeDir, settingPath)
|
||||
if err != nil || rel == ".." || strings.HasPrefix(rel, ".."+string(os.PathSeparator)) {
|
||||
return MinimalClaudeSettings{}
|
||||
}
|
||||
|
||||
info, err := os.Stat(settingPath)
|
||||
if err != nil || info.Size() > MaxClaudeSettingsBytes {
|
||||
return MinimalClaudeSettings{}
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(settingPath) // #nosec G304 -- path is fixed under user home and validated to stay within claudeDir
|
||||
if err != nil {
|
||||
return MinimalClaudeSettings{}
|
||||
}
|
||||
|
||||
var cfg struct {
|
||||
Env map[string]any `json:"env"`
|
||||
Model any `json:"model"`
|
||||
}
|
||||
if err := json.Unmarshal(data, &cfg); err != nil {
|
||||
return MinimalClaudeSettings{}
|
||||
}
|
||||
|
||||
out := MinimalClaudeSettings{}
|
||||
|
||||
if model, ok := cfg.Model.(string); ok {
|
||||
out.Model = strings.TrimSpace(model)
|
||||
}
|
||||
|
||||
if len(cfg.Env) == 0 {
|
||||
return out
|
||||
}
|
||||
|
||||
env := make(map[string]string, len(cfg.Env))
|
||||
for k, v := range cfg.Env {
|
||||
s, ok := v.(string)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
env[k] = s
|
||||
}
|
||||
if len(env) == 0 {
|
||||
return out
|
||||
}
|
||||
out.Env = env
|
||||
return out
|
||||
}
|
||||
|
||||
func LoadMinimalEnvSettings() map[string]string {
|
||||
settings := LoadMinimalClaudeSettings()
|
||||
if len(settings.Env) == 0 {
|
||||
return nil
|
||||
}
|
||||
return settings.Env
|
||||
}
|
||||
|
||||
func buildClaudeArgs(cfg *config.Config, targetArg string) []string {
|
||||
if cfg == nil {
|
||||
return nil
|
||||
}
|
||||
args := []string{"-p"}
|
||||
// Default to skip permissions unless CODEAGENT_SKIP_PERMISSIONS=false
|
||||
if cfg.SkipPermissions || cfg.Yolo || config.EnvFlagDefaultTrue("CODEAGENT_SKIP_PERMISSIONS") {
|
||||
args = append(args, "--dangerously-skip-permissions")
|
||||
}
|
||||
|
||||
// Prevent infinite recursion: disable all setting sources (user, project, local)
|
||||
// This ensures a clean execution environment without CLAUDE.md or skills that would trigger codeagent
|
||||
args = append(args, "--setting-sources", "")
|
||||
|
||||
if model := strings.TrimSpace(cfg.Model); model != "" {
|
||||
args = append(args, "--model", model)
|
||||
}
|
||||
|
||||
if cfg.Mode == "resume" {
|
||||
if cfg.SessionID != "" {
|
||||
// Claude CLI uses -r <session_id> for resume.
|
||||
args = append(args, "-r", cfg.SessionID)
|
||||
}
|
||||
}
|
||||
|
||||
args = append(args, "--output-format", "stream-json", "--verbose", targetArg)
|
||||
|
||||
return args
|
||||
}
|
||||
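As a sketch of how the minimal settings extraction above behaves, an illustrative test in the style of the existing ones; the RETRIES key and all values are made up. Only string-valued env entries and a string model survive the extraction.

package backend

import (
    "os"
    "path/filepath"
    "testing"
)

// TestLoadMinimalClaudeSettings_Sketch is illustrative, not part of this change set:
// it shows which settings.json fields survive the minimal extraction.
func TestLoadMinimalClaudeSettings_Sketch(t *testing.T) {
    home := t.TempDir()
    t.Setenv("HOME", home)
    t.Setenv("USERPROFILE", home)

    dir := filepath.Join(home, ".claude")
    if err := os.MkdirAll(dir, 0o755); err != nil {
        t.Fatal(err)
    }
    // "permissions" is ignored; RETRIES is dropped because it is not a string.
    raw := `{"env":{"ANTHROPIC_API_KEY":"secret","RETRIES":3},"model":"opus","permissions":{}}`
    if err := os.WriteFile(filepath.Join(dir, "settings.json"), []byte(raw), 0o600); err != nil {
        t.Fatal(err)
    }

    got := LoadMinimalClaudeSettings()
    if got.Model != "opus" || got.Env["ANTHROPIC_API_KEY"] != "secret" {
        t.Fatalf("unexpected settings: %+v", got)
    }
    if _, ok := got.Env["RETRIES"]; ok {
        t.Fatal("non-string env values should be dropped")
    }
}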
79
codeagent-wrapper/internal/backend/codex.go
Normal file
@@ -0,0 +1,79 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
config "codeagent-wrapper/internal/config"
|
||||
)
|
||||
|
||||
type CodexBackend struct{}
|
||||
|
||||
func (CodexBackend) Name() string { return "codex" }
|
||||
func (CodexBackend) Command() string { return "codex" }
|
||||
func (CodexBackend) Env(baseURL, apiKey string) map[string]string {
|
||||
baseURL = strings.TrimSpace(baseURL)
|
||||
apiKey = strings.TrimSpace(apiKey)
|
||||
if baseURL == "" && apiKey == "" {
|
||||
return nil
|
||||
}
|
||||
env := make(map[string]string, 2)
|
||||
if baseURL != "" {
|
||||
env["OPENAI_BASE_URL"] = baseURL
|
||||
}
|
||||
if apiKey != "" {
|
||||
env["OPENAI_API_KEY"] = apiKey
|
||||
}
|
||||
return env
|
||||
}
|
||||
func (CodexBackend) BuildArgs(cfg *config.Config, targetArg string) []string {
|
||||
return BuildCodexArgs(cfg, targetArg)
|
||||
}
|
||||
|
||||
func BuildCodexArgs(cfg *config.Config, targetArg string) []string {
|
||||
if cfg == nil {
|
||||
panic("buildCodexArgs: nil config")
|
||||
}
|
||||
|
||||
var resumeSessionID string
|
||||
isResume := cfg.Mode == "resume"
|
||||
if isResume {
|
||||
resumeSessionID = strings.TrimSpace(cfg.SessionID)
|
||||
if resumeSessionID == "" {
|
||||
logErrorFn("invalid config: resume mode requires non-empty session_id")
|
||||
isResume = false
|
||||
}
|
||||
}
|
||||
|
||||
args := []string{"e"}
|
||||
|
||||
// Default to bypass sandbox unless CODEX_BYPASS_SANDBOX=false
|
||||
if cfg.Yolo || config.EnvFlagDefaultTrue("CODEX_BYPASS_SANDBOX") {
|
||||
logWarnFn("YOLO mode or CODEX_BYPASS_SANDBOX enabled: running without approval/sandbox protection")
|
||||
args = append(args, "--dangerously-bypass-approvals-and-sandbox")
|
||||
}
|
||||
|
||||
if model := strings.TrimSpace(cfg.Model); model != "" {
|
||||
args = append(args, "--model", model)
|
||||
}
|
||||
|
||||
if reasoningEffort := strings.TrimSpace(cfg.ReasoningEffort); reasoningEffort != "" {
|
||||
args = append(args, "-c", "model_reasoning_effort="+reasoningEffort)
|
||||
}
|
||||
|
||||
args = append(args, "--skip-git-repo-check")
|
||||
|
||||
if isResume {
|
||||
return append(args,
|
||||
"--json",
|
||||
"resume",
|
||||
resumeSessionID,
|
||||
targetArg,
|
||||
)
|
||||
}
|
||||
|
||||
return append(args,
|
||||
"-C", cfg.WorkDir,
|
||||
"--json",
|
||||
targetArg,
|
||||
)
|
||||
}
|
||||
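For reference, a sketch of the full argument layout BuildCodexArgs produces when a model and a reasoning effort are both set and the sandbox-bypass default is left on; the model name and paths are placeholders.

package backend

import (
    "reflect"
    "testing"

    config "codeagent-wrapper/internal/config"
)

// TestBuildCodexArgs_Sketch illustrates the flag ordering with model and
// reasoning effort set. Illustrative only; "o3" and "/repo" are placeholders.
func TestBuildCodexArgs_Sketch(t *testing.T) {
    t.Setenv("CODEX_BYPASS_SANDBOX", "true")
    cfg := &config.Config{Mode: "new", WorkDir: "/repo", Model: "o3", ReasoningEffort: "high"}
    got := BuildCodexArgs(cfg, "task")
    want := []string{
        "e", "--dangerously-bypass-approvals-and-sandbox",
        "--model", "o3",
        "-c", "model_reasoning_effort=high",
        "--skip-git-repo-check",
        "-C", "/repo", "--json", "task",
    }
    if !reflect.DeepEqual(got, want) {
        t.Fatalf("got %v, want %v", got, want)
    }
}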
110
codeagent-wrapper/internal/backend/gemini.go
Normal file
@@ -0,0 +1,110 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
config "codeagent-wrapper/internal/config"
|
||||
)
|
||||
|
||||
type GeminiBackend struct{}
|
||||
|
||||
func (GeminiBackend) Name() string { return "gemini" }
|
||||
func (GeminiBackend) Command() string { return "gemini" }
|
||||
func (GeminiBackend) Env(baseURL, apiKey string) map[string]string {
|
||||
baseURL = strings.TrimSpace(baseURL)
|
||||
apiKey = strings.TrimSpace(apiKey)
|
||||
if baseURL == "" && apiKey == "" {
|
||||
return nil
|
||||
}
|
||||
env := make(map[string]string, 2)
|
||||
if baseURL != "" {
|
||||
env["GOOGLE_GEMINI_BASE_URL"] = baseURL
|
||||
}
|
||||
if apiKey != "" {
|
||||
env["GEMINI_API_KEY"] = apiKey
|
||||
}
|
||||
return env
|
||||
}
|
||||
func (GeminiBackend) BuildArgs(cfg *config.Config, targetArg string) []string {
|
||||
return buildGeminiArgs(cfg, targetArg)
|
||||
}
|
||||
|
||||
// LoadGeminiEnv loads environment variables from ~/.gemini/.env
|
||||
// Supports GEMINI_API_KEY, GEMINI_MODEL, GOOGLE_GEMINI_BASE_URL
|
||||
// Also sets GEMINI_API_KEY_AUTH_MECHANISM=bearer for third-party API compatibility
|
||||
func LoadGeminiEnv() map[string]string {
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil || home == "" {
|
||||
return nil
|
||||
}
|
||||
|
||||
envDir := filepath.Clean(filepath.Join(home, ".gemini"))
|
||||
envPath := filepath.Clean(filepath.Join(envDir, ".env"))
|
||||
rel, err := filepath.Rel(envDir, envPath)
|
||||
if err != nil || rel == ".." || strings.HasPrefix(rel, ".."+string(os.PathSeparator)) {
|
||||
return nil
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(envPath) // #nosec G304 -- path is fixed under user home and validated to stay within envDir
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
env := make(map[string]string)
|
||||
for _, line := range strings.Split(string(data), "\n") {
|
||||
line = strings.TrimSpace(line)
|
||||
if line == "" || strings.HasPrefix(line, "#") {
|
||||
continue
|
||||
}
|
||||
idx := strings.IndexByte(line, '=')
|
||||
if idx <= 0 {
|
||||
continue
|
||||
}
|
||||
key := strings.TrimSpace(line[:idx])
|
||||
value := strings.TrimSpace(line[idx+1:])
|
||||
if key != "" && value != "" {
|
||||
env[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
// Set bearer auth mechanism for third-party API compatibility
|
||||
if _, ok := env["GEMINI_API_KEY"]; ok {
|
||||
if _, hasAuth := env["GEMINI_API_KEY_AUTH_MECHANISM"]; !hasAuth {
|
||||
env["GEMINI_API_KEY_AUTH_MECHANISM"] = "bearer"
|
||||
}
|
||||
}
|
||||
|
||||
if len(env) == 0 {
|
||||
return nil
|
||||
}
|
||||
return env
|
||||
}
|
||||
|
||||
func buildGeminiArgs(cfg *config.Config, targetArg string) []string {
|
||||
if cfg == nil {
|
||||
return nil
|
||||
}
|
||||
args := []string{"-o", "stream-json", "-y"}
|
||||
|
||||
if model := strings.TrimSpace(cfg.Model); model != "" {
|
||||
args = append(args, "-m", model)
|
||||
}
|
||||
|
||||
if cfg.Mode == "resume" {
|
||||
if cfg.SessionID != "" {
|
||||
args = append(args, "-r", cfg.SessionID)
|
||||
}
|
||||
}
|
||||
|
||||
// Use positional argument instead of deprecated -p flag.
|
||||
// For stdin mode ("-"), use -p to read from stdin.
|
||||
if targetArg == "-" {
|
||||
args = append(args, "-p", targetArg)
|
||||
} else {
|
||||
args = append(args, targetArg)
|
||||
}
|
||||
|
||||
return args
|
||||
}
|
||||
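A sketch of the ~/.gemini/.env handling described above, written as an illustrative test; the key/value pairs are made up. Comment lines and lines without "=" are skipped, and a bearer auth mechanism is added automatically when GEMINI_API_KEY is present.

package backend

import (
    "os"
    "path/filepath"
    "testing"
)

// TestLoadGeminiEnv_Sketch is illustrative only.
func TestLoadGeminiEnv_Sketch(t *testing.T) {
    home := t.TempDir()
    t.Setenv("HOME", home)
    t.Setenv("USERPROFILE", home)

    dir := filepath.Join(home, ".gemini")
    if err := os.MkdirAll(dir, 0o755); err != nil {
        t.Fatal(err)
    }
    raw := "# comment\nGEMINI_API_KEY=abc123\nGEMINI_MODEL=gemini-pro\nbroken-line\n"
    if err := os.WriteFile(filepath.Join(dir, ".env"), []byte(raw), 0o600); err != nil {
        t.Fatal(err)
    }

    env := LoadGeminiEnv()
    if env["GEMINI_API_KEY"] != "abc123" || env["GEMINI_MODEL"] != "gemini-pro" {
        t.Fatalf("unexpected env: %v", env)
    }
    if env["GEMINI_API_KEY_AUTH_MECHANISM"] != "bearer" {
        t.Fatal("expected bearer auth mechanism to be set by default")
    }
}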
29
codeagent-wrapper/internal/backend/opencode.go
Normal file
@@ -0,0 +1,29 @@
package backend

import (
    "strings"

    config "codeagent-wrapper/internal/config"
)

type OpencodeBackend struct{}

func (OpencodeBackend) Name() string { return "opencode" }
func (OpencodeBackend) Command() string { return "opencode" }
func (OpencodeBackend) Env(baseURL, apiKey string) map[string]string { return nil }
func (OpencodeBackend) BuildArgs(cfg *config.Config, targetArg string) []string {
    args := []string{"run"}
    if cfg != nil {
        if model := strings.TrimSpace(cfg.Model); model != "" {
            args = append(args, "-m", model)
        }
        if cfg.Mode == "resume" && cfg.SessionID != "" {
            args = append(args, "-s", cfg.SessionID)
        }
    }
    args = append(args, "--format", "json")
    if targetArg != "-" {
        args = append(args, targetArg)
    }
    return args
}
29
codeagent-wrapper/internal/backend/registry.go
Normal file
@@ -0,0 +1,29 @@
package backend

import (
    "fmt"
    "strings"
)

var registry = map[string]Backend{
    "codex": CodexBackend{},
    "claude": ClaudeBackend{},
    "gemini": GeminiBackend{},
    "opencode": OpencodeBackend{},
}

// Registry exposes the available backends. Intended for internal inspection/tests.
func Registry() map[string]Backend {
    return registry
}

func Select(name string) (Backend, error) {
    key := strings.ToLower(strings.TrimSpace(name))
    if key == "" {
        key = "codex"
    }
    if backend, ok := registry[key]; ok {
        return backend, nil
    }
    return nil, fmt.Errorf("unsupported backend %q", name)
}
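A short usage sketch of Select, showing the trimming, lowercasing, and codex fallback described above (illustrative only).

package backend

import "testing"

// TestSelect_Sketch illustrates registry lookup: names are trimmed and
// lowercased, an empty name falls back to codex, and unknown names error.
func TestSelect_Sketch(t *testing.T) {
    if b, err := Select("  Claude "); err != nil || b.Name() != "claude" {
        t.Fatalf("Select(Claude) = %v, %v", b, err)
    }
    if b, err := Select(""); err != nil || b.Name() != "codex" {
        t.Fatalf("Select(\"\") = %v, %v", b, err)
    }
    if _, err := Select("does-not-exist"); err == nil {
        t.Fatal("expected error for unknown backend")
    }
}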
220
codeagent-wrapper/internal/config/agent.go
Normal file
@@ -0,0 +1,220 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
ilogger "codeagent-wrapper/internal/logger"
|
||||
|
||||
"github.com/goccy/go-json"
|
||||
)
|
||||
|
||||
type BackendConfig struct {
|
||||
BaseURL string `json:"base_url,omitempty"`
|
||||
APIKey string `json:"api_key,omitempty"`
|
||||
}
|
||||
|
||||
type AgentModelConfig struct {
|
||||
Backend string `json:"backend"`
|
||||
Model string `json:"model"`
|
||||
PromptFile string `json:"prompt_file,omitempty"`
|
||||
Description string `json:"description,omitempty"`
|
||||
Yolo bool `json:"yolo,omitempty"`
|
||||
Reasoning string `json:"reasoning,omitempty"`
|
||||
BaseURL string `json:"base_url,omitempty"`
|
||||
APIKey string `json:"api_key,omitempty"`
|
||||
}
|
||||
|
||||
type ModelsConfig struct {
|
||||
DefaultBackend string `json:"default_backend"`
|
||||
DefaultModel string `json:"default_model"`
|
||||
Agents map[string]AgentModelConfig `json:"agents"`
|
||||
Backends map[string]BackendConfig `json:"backends,omitempty"`
|
||||
}
|
||||
|
||||
var defaultModelsConfig = ModelsConfig{
|
||||
DefaultBackend: "opencode",
|
||||
DefaultModel: "opencode/grok-code",
|
||||
Agents: map[string]AgentModelConfig{
|
||||
"oracle": {Backend: "claude", Model: "claude-opus-4-5-20251101", PromptFile: "~/.claude/skills/omo/references/oracle.md", Description: "Technical advisor"},
|
||||
"librarian": {Backend: "claude", Model: "claude-sonnet-4-5-20250929", PromptFile: "~/.claude/skills/omo/references/librarian.md", Description: "Researcher"},
|
||||
"explore": {Backend: "opencode", Model: "opencode/grok-code", PromptFile: "~/.claude/skills/omo/references/explore.md", Description: "Code search"},
|
||||
"develop": {Backend: "codex", Model: "", PromptFile: "~/.claude/skills/omo/references/develop.md", Description: "Code development"},
|
||||
"frontend-ui-ux-engineer": {Backend: "gemini", Model: "", PromptFile: "~/.claude/skills/omo/references/frontend-ui-ux-engineer.md", Description: "Frontend engineer"},
|
||||
"document-writer": {Backend: "gemini", Model: "", PromptFile: "~/.claude/skills/omo/references/document-writer.md", Description: "Documentation"},
|
||||
},
|
||||
}
|
||||
|
||||
var (
|
||||
modelsConfigOnce sync.Once
|
||||
modelsConfigCached *ModelsConfig
|
||||
)
|
||||
|
||||
func modelsConfig() *ModelsConfig {
|
||||
modelsConfigOnce.Do(func() {
|
||||
modelsConfigCached = loadModelsConfig()
|
||||
})
|
||||
if modelsConfigCached == nil {
|
||||
return &defaultModelsConfig
|
||||
}
|
||||
return modelsConfigCached
|
||||
}
|
||||
|
||||
func loadModelsConfig() *ModelsConfig {
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
ilogger.LogWarn(fmt.Sprintf("Failed to resolve home directory for models config: %v; using defaults", err))
|
||||
return &defaultModelsConfig
|
||||
}
|
||||
|
||||
configDir := filepath.Clean(filepath.Join(home, ".codeagent"))
|
||||
configPath := filepath.Clean(filepath.Join(configDir, "models.json"))
|
||||
rel, err := filepath.Rel(configDir, configPath)
|
||||
if err != nil || rel == ".." || strings.HasPrefix(rel, ".."+string(os.PathSeparator)) {
|
||||
return &defaultModelsConfig
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(configPath) // #nosec G304 -- path is fixed under user home and validated to stay within configDir
|
||||
if err != nil {
|
||||
if !os.IsNotExist(err) {
|
||||
ilogger.LogWarn(fmt.Sprintf("Failed to read models config %s: %v; using defaults", configPath, err))
|
||||
}
|
||||
return &defaultModelsConfig
|
||||
}
|
||||
|
||||
var cfg ModelsConfig
|
||||
if err := json.Unmarshal(data, &cfg); err != nil {
|
||||
ilogger.LogWarn(fmt.Sprintf("Failed to parse models config %s: %v; using defaults", configPath, err))
|
||||
return &defaultModelsConfig
|
||||
}
|
||||
|
||||
cfg.DefaultBackend = strings.TrimSpace(cfg.DefaultBackend)
|
||||
if cfg.DefaultBackend == "" {
|
||||
cfg.DefaultBackend = defaultModelsConfig.DefaultBackend
|
||||
}
|
||||
cfg.DefaultModel = strings.TrimSpace(cfg.DefaultModel)
|
||||
if cfg.DefaultModel == "" {
|
||||
cfg.DefaultModel = defaultModelsConfig.DefaultModel
|
||||
}
|
||||
|
||||
// Merge with defaults
|
||||
for name, agent := range defaultModelsConfig.Agents {
|
||||
if _, exists := cfg.Agents[name]; !exists {
|
||||
if cfg.Agents == nil {
|
||||
cfg.Agents = make(map[string]AgentModelConfig)
|
||||
}
|
||||
cfg.Agents[name] = agent
|
||||
}
|
||||
}
|
||||
|
||||
// Normalize backend keys so lookups can be case-insensitive.
|
||||
if len(cfg.Backends) > 0 {
|
||||
normalized := make(map[string]BackendConfig, len(cfg.Backends))
|
||||
for k, v := range cfg.Backends {
|
||||
key := strings.ToLower(strings.TrimSpace(k))
|
||||
if key == "" {
|
||||
continue
|
||||
}
|
||||
normalized[key] = v
|
||||
}
|
||||
if len(normalized) > 0 {
|
||||
cfg.Backends = normalized
|
||||
} else {
|
||||
cfg.Backends = nil
|
||||
}
|
||||
}
|
||||
|
||||
return &cfg
|
||||
}
|
||||
|
||||
func LoadDynamicAgent(name string) (AgentModelConfig, bool) {
|
||||
if err := ValidateAgentName(name); err != nil {
|
||||
return AgentModelConfig{}, false
|
||||
}
|
||||
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil || strings.TrimSpace(home) == "" {
|
||||
return AgentModelConfig{}, false
|
||||
}
|
||||
|
||||
absPath := filepath.Join(home, ".codeagent", "agents", name+".md")
|
||||
info, err := os.Stat(absPath)
|
||||
if err != nil || info.IsDir() {
|
||||
return AgentModelConfig{}, false
|
||||
}
|
||||
|
||||
return AgentModelConfig{PromptFile: "~/.codeagent/agents/" + name + ".md"}, true
|
||||
}
|
||||
|
||||
func ResolveBackendConfig(backendName string) (baseURL, apiKey string) {
|
||||
cfg := modelsConfig()
|
||||
resolved := resolveBackendConfig(cfg, backendName)
|
||||
return strings.TrimSpace(resolved.BaseURL), strings.TrimSpace(resolved.APIKey)
|
||||
}
|
||||
|
||||
func resolveBackendConfig(cfg *ModelsConfig, backendName string) BackendConfig {
|
||||
if cfg == nil || len(cfg.Backends) == 0 {
|
||||
return BackendConfig{}
|
||||
}
|
||||
key := strings.ToLower(strings.TrimSpace(backendName))
|
||||
if key == "" {
|
||||
key = strings.ToLower(strings.TrimSpace(cfg.DefaultBackend))
|
||||
}
|
||||
if key == "" {
|
||||
return BackendConfig{}
|
||||
}
|
||||
if backend, ok := cfg.Backends[key]; ok {
|
||||
return backend
|
||||
}
|
||||
return BackendConfig{}
|
||||
}
|
||||
|
||||
func resolveAgentConfig(agentName string) (backend, model, promptFile, reasoning, baseURL, apiKey string, yolo bool) {
|
||||
cfg := modelsConfig()
|
||||
if agent, ok := cfg.Agents[agentName]; ok {
|
||||
backend = strings.TrimSpace(agent.Backend)
|
||||
if backend == "" {
|
||||
backend = cfg.DefaultBackend
|
||||
}
|
||||
backendCfg := resolveBackendConfig(cfg, backend)
|
||||
|
||||
baseURL = strings.TrimSpace(agent.BaseURL)
|
||||
if baseURL == "" {
|
||||
baseURL = strings.TrimSpace(backendCfg.BaseURL)
|
||||
}
|
||||
apiKey = strings.TrimSpace(agent.APIKey)
|
||||
if apiKey == "" {
|
||||
apiKey = strings.TrimSpace(backendCfg.APIKey)
|
||||
}
|
||||
|
||||
return backend, strings.TrimSpace(agent.Model), agent.PromptFile, agent.Reasoning, baseURL, apiKey, agent.Yolo
|
||||
}
|
||||
|
||||
if dynamic, ok := LoadDynamicAgent(agentName); ok {
|
||||
backend = cfg.DefaultBackend
|
||||
model = cfg.DefaultModel
|
||||
backendCfg := resolveBackendConfig(cfg, backend)
|
||||
baseURL = strings.TrimSpace(backendCfg.BaseURL)
|
||||
apiKey = strings.TrimSpace(backendCfg.APIKey)
|
||||
return backend, model, dynamic.PromptFile, "", baseURL, apiKey, false
|
||||
}
|
||||
|
||||
backend = cfg.DefaultBackend
|
||||
model = cfg.DefaultModel
|
||||
backendCfg := resolveBackendConfig(cfg, backend)
|
||||
baseURL = strings.TrimSpace(backendCfg.BaseURL)
|
||||
apiKey = strings.TrimSpace(backendCfg.APIKey)
|
||||
return backend, model, "", "", baseURL, apiKey, false
|
||||
}
|
||||
|
||||
func ResolveAgentConfig(agentName string) (backend, model, promptFile, reasoning, baseURL, apiKey string, yolo bool) {
|
||||
return resolveAgentConfig(agentName)
|
||||
}
|
||||
|
||||
func ResetModelsConfigCacheForTest() {
|
||||
modelsConfigCached = nil
|
||||
modelsConfigOnce = sync.Once{}
|
||||
}
|
||||
221
codeagent-wrapper/internal/config/agent_config_test.go
Normal file
@@ -0,0 +1,221 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestResolveAgentConfig_Defaults(t *testing.T) {
|
||||
home := t.TempDir()
|
||||
t.Setenv("HOME", home)
|
||||
t.Setenv("USERPROFILE", home)
|
||||
t.Cleanup(ResetModelsConfigCacheForTest)
|
||||
ResetModelsConfigCacheForTest()
|
||||
|
||||
// Test that default agents resolve correctly without config file
|
||||
tests := []struct {
|
||||
agent string
|
||||
wantBackend string
|
||||
wantModel string
|
||||
wantPromptFile string
|
||||
}{
|
||||
{"oracle", "claude", "claude-opus-4-5-20251101", "~/.claude/skills/omo/references/oracle.md"},
|
||||
{"librarian", "claude", "claude-sonnet-4-5-20250929", "~/.claude/skills/omo/references/librarian.md"},
|
||||
{"explore", "opencode", "opencode/grok-code", "~/.claude/skills/omo/references/explore.md"},
|
||||
{"frontend-ui-ux-engineer", "gemini", "", "~/.claude/skills/omo/references/frontend-ui-ux-engineer.md"},
|
||||
{"document-writer", "gemini", "", "~/.claude/skills/omo/references/document-writer.md"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.agent, func(t *testing.T) {
|
||||
backend, model, promptFile, _, _, _, _ := resolveAgentConfig(tt.agent)
|
||||
if backend != tt.wantBackend {
|
||||
t.Errorf("backend = %q, want %q", backend, tt.wantBackend)
|
||||
}
|
||||
if model != tt.wantModel {
|
||||
t.Errorf("model = %q, want %q", model, tt.wantModel)
|
||||
}
|
||||
if promptFile != tt.wantPromptFile {
|
||||
t.Errorf("promptFile = %q, want %q", promptFile, tt.wantPromptFile)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveAgentConfig_UnknownAgent(t *testing.T) {
|
||||
home := t.TempDir()
|
||||
t.Setenv("HOME", home)
|
||||
t.Setenv("USERPROFILE", home)
|
||||
t.Cleanup(ResetModelsConfigCacheForTest)
|
||||
ResetModelsConfigCacheForTest()
|
||||
|
||||
backend, model, promptFile, _, _, _, _ := resolveAgentConfig("unknown-agent")
|
||||
if backend != "opencode" {
|
||||
t.Errorf("unknown agent backend = %q, want %q", backend, "opencode")
|
||||
}
|
||||
if model != "opencode/grok-code" {
|
||||
t.Errorf("unknown agent model = %q, want %q", model, "opencode/grok-code")
|
||||
}
|
||||
if promptFile != "" {
|
||||
t.Errorf("unknown agent promptFile = %q, want empty", promptFile)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoadModelsConfig_NoFile(t *testing.T) {
|
||||
home := "/nonexistent/path/that/does/not/exist"
|
||||
t.Setenv("HOME", home)
|
||||
t.Setenv("USERPROFILE", home)
|
||||
t.Cleanup(ResetModelsConfigCacheForTest)
|
||||
ResetModelsConfigCacheForTest()
|
||||
|
||||
cfg := loadModelsConfig()
|
||||
if cfg.DefaultBackend != "opencode" {
|
||||
t.Errorf("DefaultBackend = %q, want %q", cfg.DefaultBackend, "opencode")
|
||||
}
|
||||
if len(cfg.Agents) != 6 {
|
||||
t.Errorf("len(Agents) = %d, want 6", len(cfg.Agents))
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoadModelsConfig_WithFile(t *testing.T) {
|
||||
// Create temp dir and config file
|
||||
tmpDir := t.TempDir()
|
||||
configDir := filepath.Join(tmpDir, ".codeagent")
|
||||
if err := os.MkdirAll(configDir, 0755); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
configContent := `{
|
||||
"default_backend": "claude",
|
||||
"default_model": "claude-opus-4",
|
||||
"backends": {
|
||||
"Claude": {
|
||||
"base_url": "https://backend.example",
|
||||
"api_key": "backend-key"
|
||||
},
|
||||
"codex": {
|
||||
"base_url": "https://openai.example",
|
||||
"api_key": "openai-key"
|
||||
}
|
||||
},
|
||||
"agents": {
|
||||
"custom-agent": {
|
||||
"backend": "codex",
|
||||
"model": "gpt-4o",
|
||||
"description": "Custom agent",
|
||||
"base_url": "https://agent.example",
|
||||
"api_key": "agent-key"
|
||||
}
|
||||
}
|
||||
}`
|
||||
configPath := filepath.Join(configDir, "models.json")
|
||||
if err := os.WriteFile(configPath, []byte(configContent), 0644); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
t.Setenv("HOME", tmpDir)
|
||||
t.Setenv("USERPROFILE", tmpDir)
|
||||
t.Cleanup(ResetModelsConfigCacheForTest)
|
||||
ResetModelsConfigCacheForTest()
|
||||
|
||||
cfg := loadModelsConfig()
|
||||
|
||||
if cfg.DefaultBackend != "claude" {
|
||||
t.Errorf("DefaultBackend = %q, want %q", cfg.DefaultBackend, "claude")
|
||||
}
|
||||
if cfg.DefaultModel != "claude-opus-4" {
|
||||
t.Errorf("DefaultModel = %q, want %q", cfg.DefaultModel, "claude-opus-4")
|
||||
}
|
||||
|
||||
// Check custom agent
|
||||
if agent, ok := cfg.Agents["custom-agent"]; !ok {
|
||||
t.Error("custom-agent not found")
|
||||
} else {
|
||||
if agent.Backend != "codex" {
|
||||
t.Errorf("custom-agent.Backend = %q, want %q", agent.Backend, "codex")
|
||||
}
|
||||
if agent.Model != "gpt-4o" {
|
||||
t.Errorf("custom-agent.Model = %q, want %q", agent.Model, "gpt-4o")
|
||||
}
|
||||
}
|
||||
|
||||
// Check that defaults are merged
|
||||
if _, ok := cfg.Agents["oracle"]; !ok {
|
||||
t.Error("default agent oracle should be merged")
|
||||
}
|
||||
|
||||
baseURL, apiKey := ResolveBackendConfig("claude")
|
||||
if baseURL != "https://backend.example" {
|
||||
t.Errorf("ResolveBackendConfig(baseURL) = %q, want %q", baseURL, "https://backend.example")
|
||||
}
|
||||
if apiKey != "backend-key" {
|
||||
t.Errorf("ResolveBackendConfig(apiKey) = %q, want %q", apiKey, "backend-key")
|
||||
}
|
||||
|
||||
backend, model, _, _, agentBaseURL, agentAPIKey, _ := ResolveAgentConfig("custom-agent")
|
||||
if backend != "codex" {
|
||||
t.Errorf("ResolveAgentConfig(backend) = %q, want %q", backend, "codex")
|
||||
}
|
||||
if model != "gpt-4o" {
|
||||
t.Errorf("ResolveAgentConfig(model) = %q, want %q", model, "gpt-4o")
|
||||
}
|
||||
if agentBaseURL != "https://agent.example" {
|
||||
t.Errorf("ResolveAgentConfig(baseURL) = %q, want %q", agentBaseURL, "https://agent.example")
|
||||
}
|
||||
if agentAPIKey != "agent-key" {
|
||||
t.Errorf("ResolveAgentConfig(apiKey) = %q, want %q", agentAPIKey, "agent-key")
|
||||
}
|
||||
}
|
||||
|
||||
func TestResolveAgentConfig_DynamicAgent(t *testing.T) {
|
||||
home := t.TempDir()
|
||||
t.Setenv("HOME", home)
|
||||
t.Setenv("USERPROFILE", home)
|
||||
t.Cleanup(ResetModelsConfigCacheForTest)
|
||||
ResetModelsConfigCacheForTest()
|
||||
|
||||
agentDir := filepath.Join(home, ".codeagent", "agents")
|
||||
if err := os.MkdirAll(agentDir, 0o755); err != nil {
|
||||
t.Fatalf("MkdirAll: %v", err)
|
||||
}
|
||||
if err := os.WriteFile(filepath.Join(agentDir, "sarsh.md"), []byte("prompt\n"), 0o644); err != nil {
|
||||
t.Fatalf("WriteFile: %v", err)
|
||||
}
|
||||
|
||||
backend, model, promptFile, _, _, _, _ := resolveAgentConfig("sarsh")
|
||||
if backend != "opencode" {
|
||||
t.Errorf("backend = %q, want %q", backend, "opencode")
|
||||
}
|
||||
if model != "opencode/grok-code" {
|
||||
t.Errorf("model = %q, want %q", model, "opencode/grok-code")
|
||||
}
|
||||
if promptFile != "~/.codeagent/agents/sarsh.md" {
|
||||
t.Errorf("promptFile = %q, want %q", promptFile, "~/.codeagent/agents/sarsh.md")
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoadModelsConfig_InvalidJSON(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
configDir := filepath.Join(tmpDir, ".codeagent")
|
||||
if err := os.MkdirAll(configDir, 0755); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Write invalid JSON
|
||||
configPath := filepath.Join(configDir, "models.json")
|
||||
if err := os.WriteFile(configPath, []byte("invalid json {"), 0644); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
t.Setenv("HOME", tmpDir)
|
||||
t.Setenv("USERPROFILE", tmpDir)
|
||||
t.Cleanup(ResetModelsConfigCacheForTest)
|
||||
ResetModelsConfigCacheForTest()
|
||||
|
||||
cfg := loadModelsConfig()
|
||||
// Should fall back to defaults
|
||||
if cfg.DefaultBackend != "opencode" {
|
||||
t.Errorf("invalid JSON should fallback, got DefaultBackend = %q", cfg.DefaultBackend)
|
||||
}
|
||||
}
|
||||
102
codeagent-wrapper/internal/config/config.go
Normal file
@@ -0,0 +1,102 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Config holds CLI configuration.
|
||||
type Config struct {
|
||||
Mode string // "new" or "resume"
|
||||
Task string
|
||||
SessionID string
|
||||
WorkDir string
|
||||
Model string
|
||||
ReasoningEffort string
|
||||
ExplicitStdin bool
|
||||
Timeout int
|
||||
Backend string
|
||||
Agent string
|
||||
PromptFile string
|
||||
PromptFileExplicit bool
|
||||
SkipPermissions bool
|
||||
Yolo bool
|
||||
MaxParallelWorkers int
|
||||
}
|
||||
|
||||
// EnvFlagEnabled returns true when the environment variable exists and is not
|
||||
// explicitly set to a falsey value ("0/false/no/off").
|
||||
func EnvFlagEnabled(key string) bool {
|
||||
val, ok := os.LookupEnv(key)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
val = strings.TrimSpace(strings.ToLower(val))
|
||||
switch val {
|
||||
case "", "0", "false", "no", "off":
|
||||
return false
|
||||
default:
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
func ParseBoolFlag(val string, defaultValue bool) bool {
|
||||
val = strings.TrimSpace(strings.ToLower(val))
|
||||
switch val {
|
||||
case "1", "true", "yes", "on":
|
||||
return true
|
||||
case "0", "false", "no", "off":
|
||||
return false
|
||||
default:
|
||||
return defaultValue
|
||||
}
|
||||
}
|
||||
|
||||
// EnvFlagDefaultTrue returns true unless the env var is explicitly set to
|
||||
// false/0/no/off.
|
||||
func EnvFlagDefaultTrue(key string) bool {
|
||||
val, ok := os.LookupEnv(key)
|
||||
if !ok {
|
||||
return true
|
||||
}
|
||||
return ParseBoolFlag(val, true)
|
||||
}
|
||||
|
||||
func ValidateAgentName(name string) error {
|
||||
if strings.TrimSpace(name) == "" {
|
||||
return fmt.Errorf("agent name is empty")
|
||||
}
|
||||
for _, r := range name {
|
||||
switch {
|
||||
case r >= 'a' && r <= 'z':
|
||||
case r >= 'A' && r <= 'Z':
|
||||
case r >= '0' && r <= '9':
|
||||
case r == '-', r == '_':
|
||||
default:
|
||||
return fmt.Errorf("agent name %q contains invalid character %q", name, r)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
const maxParallelWorkersLimit = 100
|
||||
|
||||
// ResolveMaxParallelWorkers reads CODEAGENT_MAX_PARALLEL_WORKERS. It returns 0
|
||||
// for "unlimited".
|
||||
func ResolveMaxParallelWorkers() int {
|
||||
raw := strings.TrimSpace(os.Getenv("CODEAGENT_MAX_PARALLEL_WORKERS"))
|
||||
if raw == "" {
|
||||
return 0
|
||||
}
|
||||
|
||||
value, err := strconv.Atoi(raw)
|
||||
if err != nil || value < 0 {
|
||||
return 0
|
||||
}
|
||||
if value > maxParallelWorkersLimit {
|
||||
return maxParallelWorkersLimit
|
||||
}
|
||||
return value
|
||||
}
|
||||
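The env-flag helpers above have deliberately different defaults; a sketch that spells them out (the CODEAGENT_UNSET_FLAG key is made up for illustration).

package config

import "testing"

// TestEnvFlagSemantics_Sketch: EnvFlagEnabled is opt-in (unset means false),
// EnvFlagDefaultTrue is opt-out (unset means true), and unrecognised values
// fall back to the supplied default. Illustrative only.
func TestEnvFlagSemantics_Sketch(t *testing.T) {
    if EnvFlagEnabled("CODEAGENT_UNSET_FLAG") {
        t.Fatal("unset flag should not be enabled")
    }
    if !EnvFlagDefaultTrue("CODEAGENT_UNSET_FLAG") {
        t.Fatal("unset flag should default to true")
    }
    t.Setenv("CODEAGENT_UNSET_FLAG", "off")
    if EnvFlagDefaultTrue("CODEAGENT_UNSET_FLAG") {
        t.Fatal("explicit off should disable the flag")
    }
    if ParseBoolFlag("definitely", false) {
        t.Fatal("unknown values should fall back to the default")
    }
}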
47
codeagent-wrapper/internal/config/viper.go
Normal file
@@ -0,0 +1,47 @@
|
||||
package config
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
// NewViper returns a viper instance configured for CODEAGENT_* environment
|
||||
// variables and an optional config file.
|
||||
//
|
||||
// Search order when configFile is empty:
|
||||
// - $HOME/.codeagent/config.(yaml|yml|json|toml|...)
|
||||
func NewViper(configFile string) (*viper.Viper, error) {
|
||||
v := viper.New()
|
||||
v.SetEnvPrefix("CODEAGENT")
|
||||
v.SetEnvKeyReplacer(strings.NewReplacer("-", "_"))
|
||||
v.AutomaticEnv()
|
||||
|
||||
if strings.TrimSpace(configFile) != "" {
|
||||
v.SetConfigFile(configFile)
|
||||
if err := v.ReadInConfig(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return v, nil
|
||||
}
|
||||
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil || strings.TrimSpace(home) == "" {
|
||||
return v, nil
|
||||
}
|
||||
|
||||
v.SetConfigName("config")
|
||||
v.AddConfigPath(filepath.Join(home, ".codeagent"))
|
||||
if err := v.ReadInConfig(); err != nil {
|
||||
var notFound viper.ConfigFileNotFoundError
|
||||
if errors.As(err, ¬Found) {
|
||||
return v, nil
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return v, nil
|
||||
}
|
||||
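A sketch of the CODEAGENT_* environment binding provided by NewViper, assuming no config file exists under the (temporary) home directory; the "backend" key is just an example lookup.

package config

import "testing"

// TestNewViper_Sketch is illustrative only: with no config file on disk,
// values still resolve from CODEAGENT_* environment variables.
func TestNewViper_Sketch(t *testing.T) {
    home := t.TempDir()
    t.Setenv("HOME", home)
    t.Setenv("USERPROFILE", home)
    t.Setenv("CODEAGENT_BACKEND", "claude")

    v, err := NewViper("")
    if err != nil {
        t.Fatal(err)
    }
    if got := v.GetString("backend"); got != "claude" {
        t.Fatalf("backend = %q, want claude", got)
    }
}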
@@ -1,4 +1,4 @@
|
||||
package main
|
||||
package executor
|
||||
|
||||
import (
|
||||
"context"
|
||||
@@ -14,15 +14,98 @@ import (
|
||||
"sync/atomic"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
backend "codeagent-wrapper/internal/backend"
|
||||
config "codeagent-wrapper/internal/config"
|
||||
ilogger "codeagent-wrapper/internal/logger"
|
||||
parser "codeagent-wrapper/internal/parser"
|
||||
utils "codeagent-wrapper/internal/utils"
|
||||
)
|
||||
|
||||
const postMessageTerminateDelay = 1 * time.Second
|
||||
const forceKillWaitTimeout = 5 * time.Second
|
||||
|
||||
// Defaults duplicated from wrapper for module decoupling.
|
||||
const (
|
||||
defaultWorkdir = "."
|
||||
defaultCoverageTarget = 90.0
|
||||
defaultBackendName = "codex"
|
||||
|
||||
codexLogLineLimit = 1000
|
||||
stderrCaptureLimit = 4 * 1024
|
||||
)
|
||||
|
||||
const (
|
||||
// stdout close reasons
|
||||
stdoutCloseReasonWait = "wait-done"
|
||||
stdoutCloseReasonDrain = "drain-timeout"
|
||||
stdoutCloseReasonCtx = "context-cancel"
|
||||
stdoutDrainTimeout = 100 * time.Millisecond
|
||||
)
|
||||
|
||||
// Hook points (tests can override inside this package).
|
||||
var (
|
||||
selectBackendFn = backend.Select
|
||||
commandContext = exec.CommandContext
|
||||
terminateCommandFn = terminateCommand
|
||||
)
|
||||
|
||||
var forceKillDelay atomic.Int32
|
||||
|
||||
func init() {
|
||||
forceKillDelay.Store(5) // seconds - default value
|
||||
}
|
||||
|
||||
type (
|
||||
Backend = backend.Backend
|
||||
Config = config.Config
|
||||
Logger = ilogger.Logger
|
||||
)
|
||||
|
||||
type minimalClaudeSettings = backend.MinimalClaudeSettings
|
||||
|
||||
func loadMinimalClaudeSettings() minimalClaudeSettings { return backend.LoadMinimalClaudeSettings() }
|
||||
|
||||
func loadGeminiEnv() map[string]string { return backend.LoadGeminiEnv() }
|
||||
|
||||
func NewLogger() (*Logger, error) { return ilogger.NewLogger() }
|
||||
|
||||
func NewLoggerWithSuffix(suffix string) (*Logger, error) { return ilogger.NewLoggerWithSuffix(suffix) }
|
||||
|
||||
func setLogger(l *Logger) { ilogger.SetLogger(l) }
|
||||
|
||||
func closeLogger() error { return ilogger.CloseLogger() }
|
||||
|
||||
func activeLogger() *Logger { return ilogger.ActiveLogger() }
|
||||
|
||||
func logInfo(msg string) { ilogger.LogInfo(msg) }
|
||||
|
||||
func logWarn(msg string) { ilogger.LogWarn(msg) }
|
||||
|
||||
func logError(msg string) { ilogger.LogError(msg) }
|
||||
|
||||
func logConcurrencyPlanning(limit, total int) { ilogger.LogConcurrencyPlanning(limit, total) }
|
||||
|
||||
func logConcurrencyState(event, taskID string, active, limit int) {
|
||||
ilogger.LogConcurrencyState(event, taskID, active, limit)
|
||||
}
|
||||
|
||||
func parseJSONStreamInternal(r io.Reader, warnFn func(string), infoFn func(string), onMessage func(), onComplete func()) (message, threadID string) {
|
||||
return parser.ParseJSONStreamInternal(r, warnFn, infoFn, onMessage, onComplete)
|
||||
}
|
||||
|
||||
func sanitizeOutput(s string) string { return utils.SanitizeOutput(s) }
|
||||
|
||||
func safeTruncate(s string, maxLen int) string { return utils.SafeTruncate(s, maxLen) }
|
||||
|
||||
func min(a, b int) int { return utils.Min(a, b) }
|
||||
|
||||
// commandRunner abstracts exec.Cmd for testability
|
||||
type commandRunner interface {
|
||||
Start() error
|
||||
Wait() error
|
||||
StdoutPipe() (io.ReadCloser, error)
|
||||
StderrPipe() (io.ReadCloser, error)
|
||||
StdinPipe() (io.WriteCloser, error)
|
||||
SetStderr(io.Writer)
|
||||
SetDir(string)
|
||||
@@ -63,6 +146,13 @@ func (r *realCmd) StdoutPipe() (io.ReadCloser, error) {
|
||||
return r.cmd.StdoutPipe()
|
||||
}
|
||||
|
||||
func (r *realCmd) StderrPipe() (io.ReadCloser, error) {
|
||||
if r.cmd == nil {
|
||||
return nil, errors.New("command is nil")
|
||||
}
|
||||
return r.cmd.StderrPipe()
|
||||
}
|
||||
|
||||
func (r *realCmd) StdinPipe() (io.WriteCloser, error) {
|
||||
if r.cmd == nil {
|
||||
return nil, errors.New("command is nil")
|
||||
@@ -221,14 +311,21 @@ func newTaskLoggerHandle(taskID string) taskLoggerHandle {
|
||||
}
|
||||
|
||||
// defaultRunCodexTaskFn is the default implementation of runCodexTaskFn (exposed for test reset)
|
||||
func defaultRunCodexTaskFn(task TaskSpec, timeout int) TaskResult {
|
||||
func DefaultRunCodexTaskFn(task TaskSpec, timeout int) TaskResult {
|
||||
if task.WorkDir == "" {
|
||||
task.WorkDir = defaultWorkdir
|
||||
}
|
||||
if task.Mode == "" {
|
||||
task.Mode = "new"
|
||||
}
|
||||
if task.UseStdin || shouldUseStdin(task.Task, false) {
|
||||
if strings.TrimSpace(task.PromptFile) != "" {
|
||||
prompt, err := ReadAgentPromptFile(task.PromptFile, false)
|
||||
if err != nil {
|
||||
return TaskResult{TaskID: task.ID, ExitCode: 1, Error: "failed to read prompt file: " + err.Error()}
|
||||
}
|
||||
task.Task = WrapTaskWithAgentPrompt(prompt, task.Task)
|
||||
}
|
||||
if task.UseStdin || ShouldUseStdin(task.Task, false) {
|
||||
task.UseStdin = true
|
||||
}
|
||||
|
||||
@@ -247,12 +344,10 @@ func defaultRunCodexTaskFn(task TaskSpec, timeout int) TaskResult {
|
||||
if parentCtx == nil {
|
||||
parentCtx = context.Background()
|
||||
}
|
||||
return runCodexTaskWithContext(parentCtx, task, backend, nil, false, true, timeout)
|
||||
return RunCodexTaskWithContext(parentCtx, task, backend, "", nil, nil, false, true, timeout)
|
||||
}
|
||||
|
||||
var runCodexTaskFn = defaultRunCodexTaskFn
|
||||
|
||||
func topologicalSort(tasks []TaskSpec) ([][]TaskSpec, error) {
|
||||
func TopologicalSort(tasks []TaskSpec) ([][]TaskSpec, error) {
|
||||
idToTask := make(map[string]TaskSpec, len(tasks))
|
||||
indegree := make(map[string]int, len(tasks))
|
||||
adj := make(map[string][]string, len(tasks))
|
||||
@@ -318,12 +413,16 @@ func topologicalSort(tasks []TaskSpec) ([][]TaskSpec, error) {
|
||||
return layers, nil
|
||||
}
|
||||
|
||||
func executeConcurrent(layers [][]TaskSpec, timeout int) []TaskResult {
|
||||
maxWorkers := resolveMaxParallelWorkers()
|
||||
return executeConcurrentWithContext(context.Background(), layers, timeout, maxWorkers)
|
||||
func ExecuteConcurrent(layers [][]TaskSpec, timeout int, runTask func(TaskSpec, int) TaskResult) []TaskResult {
|
||||
maxWorkers := config.ResolveMaxParallelWorkers()
|
||||
return ExecuteConcurrentWithContext(context.Background(), layers, timeout, maxWorkers, runTask)
|
||||
}
|
||||
|
||||
func executeConcurrentWithContext(parentCtx context.Context, layers [][]TaskSpec, timeout int, maxWorkers int) []TaskResult {
|
||||
func ExecuteConcurrentWithContext(parentCtx context.Context, layers [][]TaskSpec, timeout int, maxWorkers int, runTask func(TaskSpec, int) TaskResult) []TaskResult {
|
||||
if runTask == nil {
|
||||
runTask = DefaultRunCodexTaskFn
|
||||
}
|
||||
|
||||
totalTasks := 0
|
||||
for _, layer := range layers {
|
||||
totalTasks += len(layer)
|
||||
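A usage sketch of the newly exported helpers, assuming two independent tasks and a stub runTask callback; whether additional logger or context wiring is needed in practice is not shown here.

package executor

import "testing"

// TestExecuteConcurrent_Sketch shows the intended call pattern: sort tasks into
// dependency layers, then run each layer with a custom runTask callback
// (here a stub instead of a real backend invocation). Illustrative only.
func TestExecuteConcurrent_Sketch(t *testing.T) {
    tasks := []TaskSpec{{ID: "a", Task: "lint"}, {ID: "b", Task: "test"}}

    layers, err := TopologicalSort(tasks)
    if err != nil {
        t.Fatal(err)
    }

    stub := func(ts TaskSpec, timeout int) TaskResult {
        return TaskResult{TaskID: ts.ID, ExitCode: 0, Message: "done: " + ts.Task}
    }
    results := ExecuteConcurrent(layers, 60, stub)
    if len(results) != len(tasks) {
        t.Fatalf("got %d results, want %d", len(results), len(tasks))
    }
}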
@@ -454,7 +553,7 @@ func executeConcurrentWithContext(parentCtx context.Context, layers [][]TaskSpec
|
||||
|
||||
printTaskStart(ts.ID, taskLogPath, handle.shared)
|
||||
|
||||
res := runCodexTaskFn(ts, timeout)
|
||||
res := runTask(ts, timeout)
|
||||
if taskLogPath != "" {
|
||||
if res.LogPath == "" || (handle.shared && handle.logger != nil && res.LogPath == handle.logger.Path()) {
|
||||
res.LogPath = taskLogPath
|
||||
@@ -519,14 +618,14 @@ func getStatusSymbols() (success, warning, failed string) {
|
||||
return "✓", "⚠️", "✗"
|
||||
}
|
||||
|
||||
func generateFinalOutput(results []TaskResult) string {
|
||||
return generateFinalOutputWithMode(results, true) // default to summary mode
|
||||
func GenerateFinalOutput(results []TaskResult) string {
|
||||
return GenerateFinalOutputWithMode(results, true) // default to summary mode
|
||||
}
|
||||
|
||||
// generateFinalOutputWithMode generates output based on mode
|
||||
// summaryOnly=true: structured report - every token has value
|
||||
// summaryOnly=false: full output with complete messages (legacy behavior)
|
||||
func generateFinalOutputWithMode(results []TaskResult, summaryOnly bool) string {
|
||||
func GenerateFinalOutputWithMode(results []TaskResult, summaryOnly bool) string {
|
||||
var sb strings.Builder
|
||||
successSymbol, warningSymbol, failedSymbol := getStatusSymbols()
|
||||
|
||||
@@ -739,11 +838,20 @@ func buildCodexArgs(cfg *Config, targetArg string) []string {
|
||||
|
||||
args := []string{"e"}
|
||||
|
||||
if envFlagEnabled("CODEX_BYPASS_SANDBOX") {
|
||||
logWarn("CODEX_BYPASS_SANDBOX=true: running without approval/sandbox protection")
|
||||
// Default to bypass sandbox unless CODEX_BYPASS_SANDBOX=false
|
||||
if cfg.Yolo || config.EnvFlagDefaultTrue("CODEX_BYPASS_SANDBOX") {
|
||||
logWarn("YOLO mode or CODEX_BYPASS_SANDBOX enabled: running without approval/sandbox protection")
|
||||
args = append(args, "--dangerously-bypass-approvals-and-sandbox")
|
||||
}
|
||||
|
||||
if model := strings.TrimSpace(cfg.Model); model != "" {
|
||||
args = append(args, "--model", model)
|
||||
}
|
||||
|
||||
if reasoningEffort := strings.TrimSpace(cfg.ReasoningEffort); reasoningEffort != "" {
|
||||
args = append(args, "-c", "model_reasoning_effort="+reasoningEffort)
|
||||
}
|
||||
|
||||
args = append(args, "--skip-git-repo-check")
|
||||
|
||||
if isResume {
|
||||
@@ -762,37 +870,41 @@ func buildCodexArgs(cfg *Config, targetArg string) []string {
|
||||
)
|
||||
}
|
||||
|
||||
func runCodexTask(taskSpec TaskSpec, silent bool, timeoutSec int) TaskResult {
|
||||
return runCodexTaskWithContext(context.Background(), taskSpec, nil, nil, false, silent, timeoutSec)
|
||||
}
|
||||
|
||||
func runCodexProcess(parentCtx context.Context, codexArgs []string, taskText string, useStdin bool, timeoutSec int) (message, threadID string, exitCode int) {
|
||||
res := runCodexTaskWithContext(parentCtx, TaskSpec{Task: taskText, WorkDir: defaultWorkdir, Mode: "new", UseStdin: useStdin}, nil, codexArgs, true, false, timeoutSec)
|
||||
return res.Message, res.SessionID, res.ExitCode
|
||||
}
|
||||
|
||||
func runCodexTaskWithContext(parentCtx context.Context, taskSpec TaskSpec, backend Backend, customArgs []string, useCustomArgs bool, silent bool, timeoutSec int) TaskResult {
|
||||
func RunCodexTaskWithContext(parentCtx context.Context, taskSpec TaskSpec, backend Backend, defaultCommandName string, defaultArgsBuilder func(*Config, string) []string, customArgs []string, useCustomArgs bool, silent bool, timeoutSec int) TaskResult {
|
||||
taskCtx := taskSpec.Context
|
||||
if parentCtx == nil {
|
||||
parentCtx = taskSpec.Context
|
||||
parentCtx = taskCtx
|
||||
}
|
||||
if parentCtx == nil {
|
||||
parentCtx = context.Background()
|
||||
}
|
||||
|
||||
result := TaskResult{TaskID: taskSpec.ID}
|
||||
injectedLogger := taskLoggerFromContext(parentCtx)
|
||||
injectedLogger := taskLoggerFromContext(taskCtx)
|
||||
if injectedLogger == nil {
|
||||
injectedLogger = taskLoggerFromContext(parentCtx)
|
||||
}
|
||||
logger := injectedLogger
|
||||
|
||||
cfg := &Config{
|
||||
Mode: taskSpec.Mode,
|
||||
Task: taskSpec.Task,
|
||||
SessionID: taskSpec.SessionID,
|
||||
WorkDir: taskSpec.WorkDir,
|
||||
Backend: defaultBackendName,
|
||||
Mode: taskSpec.Mode,
|
||||
Task: taskSpec.Task,
|
||||
SessionID: taskSpec.SessionID,
|
||||
WorkDir: taskSpec.WorkDir,
|
||||
Model: taskSpec.Model,
|
||||
ReasoningEffort: taskSpec.ReasoningEffort,
|
||||
SkipPermissions: taskSpec.SkipPermissions,
|
||||
Backend: defaultBackendName,
|
||||
}
|
||||
|
||||
commandName := codexCommand
|
||||
argsBuilder := buildCodexArgsFn
|
||||
commandName := strings.TrimSpace(defaultCommandName)
|
||||
if commandName == "" {
|
||||
commandName = defaultBackendName
|
||||
}
|
||||
argsBuilder := defaultArgsBuilder
|
||||
if argsBuilder == nil {
|
||||
argsBuilder = buildCodexArgs
|
||||
}
|
||||
if backend != nil {
|
||||
commandName = backend.Command()
|
||||
argsBuilder = backend.BuildArgs
|
||||
@@ -816,6 +928,25 @@ func runCodexTaskWithContext(parentCtx context.Context, taskSpec TaskSpec, backe
|
||||
return result
|
||||
}
|
||||
|
||||
var fileEnv map[string]string
|
||||
if cfg.Backend == "claude" {
|
||||
settings := loadMinimalClaudeSettings()
|
||||
fileEnv = settings.Env
|
||||
if cfg.Mode != "resume" && strings.TrimSpace(cfg.Model) == "" && settings.Model != "" {
|
||||
cfg.Model = settings.Model
|
||||
}
|
||||
}
|
||||
|
||||
// Load gemini env from ~/.gemini/.env if exists
|
||||
if cfg.Backend == "gemini" {
|
||||
fileEnv = loadGeminiEnv()
|
||||
if cfg.Mode != "resume" && strings.TrimSpace(cfg.Model) == "" {
|
||||
if model := fileEnv["GEMINI_MODEL"]; model != "" {
|
||||
cfg.Model = model
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
useStdin := taskSpec.UseStdin
|
||||
targetArg := taskSpec.Task
|
||||
if useStdin {
|
||||
@@ -915,9 +1046,27 @@ func runCodexTaskWithContext(parentCtx context.Context, taskSpec TaskSpec, backe
|
||||
|
||||
cmd := newCommandRunner(ctx, commandName, codexArgs...)
|
||||
|
||||
if cfg.Backend == "claude" {
|
||||
if env := loadMinimalEnvSettings(); len(env) > 0 {
|
||||
cmd.SetEnv(env)
|
||||
if len(fileEnv) > 0 {
|
||||
cmd.SetEnv(fileEnv)
|
||||
}
|
||||
|
||||
envBackend := backend
|
||||
if envBackend == nil && cfg.Backend != "" {
|
||||
if b, err := selectBackendFn(cfg.Backend); err == nil {
|
||||
envBackend = b
|
||||
}
|
||||
}
|
||||
|
||||
if envBackend != nil {
|
||||
baseURL, apiKey := config.ResolveBackendConfig(cfg.Backend)
|
||||
if agentName := strings.TrimSpace(taskSpec.Agent); agentName != "" {
|
||||
agentBackend, _, _, _, agentBaseURL, agentAPIKey, _ := config.ResolveAgentConfig(agentName)
|
||||
if strings.EqualFold(strings.TrimSpace(agentBackend), strings.TrimSpace(cfg.Backend)) {
|
||||
baseURL, apiKey = agentBaseURL, agentAPIKey
|
||||
}
|
||||
}
|
||||
if injected := envBackend.Env(baseURL, apiKey); len(injected) > 0 {
|
||||
cmd.SetEnv(injected)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -939,33 +1088,43 @@ func runCodexTaskWithContext(parentCtx context.Context, taskSpec TaskSpec, backe
|
||||
if cfg.Backend == "gemini" {
|
||||
stderrFilter = newFilteringWriter(os.Stderr, geminiNoisePatterns)
|
||||
stderrOut = stderrFilter
|
||||
defer stderrFilter.Flush()
|
||||
} else if cfg.Backend == "codex" {
|
||||
stderrFilter = newFilteringWriter(os.Stderr, codexNoisePatterns)
|
||||
stderrOut = stderrFilter
|
||||
}
|
||||
stderrWriters = append([]io.Writer{stderrOut}, stderrWriters...)
|
||||
}
|
||||
if len(stderrWriters) == 1 {
|
||||
cmd.SetStderr(stderrWriters[0])
|
||||
} else {
|
||||
cmd.SetStderr(io.MultiWriter(stderrWriters...))
|
||||
stderr, err := cmd.StderrPipe()
|
||||
if err != nil {
|
||||
logErrorFn("Failed to create stderr pipe: " + err.Error())
|
||||
result.ExitCode = 1
|
||||
result.Error = attachStderr("failed to create stderr pipe: " + err.Error())
|
||||
return result
|
||||
}
|
||||
|
||||
var stdinPipe io.WriteCloser
|
||||
var err error
|
||||
if useStdin {
|
||||
stdinPipe, err = cmd.StdinPipe()
|
||||
if err != nil {
|
||||
logErrorFn("Failed to create stdin pipe: " + err.Error())
|
||||
result.ExitCode = 1
|
||||
result.Error = attachStderr("failed to create stdin pipe: " + err.Error())
|
||||
closeWithReason(stderr, "stdin-pipe-failed")
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
stderrDone := make(chan error, 1)
|
||||
|
||||
stdout, err := cmd.StdoutPipe()
|
||||
if err != nil {
|
||||
logErrorFn("Failed to create stdout pipe: " + err.Error())
|
||||
result.ExitCode = 1
|
||||
result.Error = attachStderr("failed to create stdout pipe: " + err.Error())
|
||||
closeWithReason(stderr, "stdout-pipe-failed")
|
||||
if stdinPipe != nil {
|
||||
_ = stdinPipe.Close()
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
@@ -1001,6 +1160,11 @@ func runCodexTaskWithContext(parentCtx context.Context, taskSpec TaskSpec, backe
|
||||
logInfoFn(fmt.Sprintf("Starting %s with args: %s %s...", commandName, commandName, strings.Join(codexArgs[:min(5, len(codexArgs))], " ")))
|
||||
|
||||
if err := cmd.Start(); err != nil {
|
||||
closeWithReason(stdout, "start-failed")
|
||||
closeWithReason(stderr, "start-failed")
|
||||
if stdinPipe != nil {
|
||||
_ = stdinPipe.Close()
|
||||
}
|
||||
if strings.Contains(err.Error(), "executable file not found") {
|
||||
msg := fmt.Sprintf("%s command not found in PATH", commandName)
|
||||
logErrorFn(msg)
|
||||
@@ -1019,6 +1183,15 @@ func runCodexTaskWithContext(parentCtx context.Context, taskSpec TaskSpec, backe
|
||||
logInfoFn(fmt.Sprintf("Log capturing to: %s", logger.Path()))
|
||||
}
|
||||
|
||||
// Start stderr drain AFTER we know the command started, but BEFORE cmd.Wait can close the pipe.
|
||||
go func() {
|
||||
_, copyErr := io.Copy(io.MultiWriter(stderrWriters...), stderr)
|
||||
if stderrFilter != nil {
|
||||
stderrFilter.Flush()
|
||||
}
|
||||
stderrDone <- copyErr
|
||||
}()
|
||||
|
||||
if useStdin && stdinPipe != nil {
|
||||
logInfoFn(fmt.Sprintf("Writing %d chars to stdin...", len(taskSpec.Task)))
|
||||
go func(data string) {
|
||||
@@ -1046,7 +1219,8 @@ func runCodexTaskWithContext(parentCtx context.Context, taskSpec TaskSpec, backe
|
||||
waitLoop:
|
||||
for {
|
||||
select {
|
||||
case waitErr = <-waitCh:
|
||||
case err := <-waitCh:
|
||||
waitErr = err
|
||||
break waitLoop
|
||||
case <-ctx.Done():
|
||||
ctxCancelled = true
|
||||
@@ -1057,8 +1231,17 @@ waitLoop:
|
||||
terminated = true
|
||||
}
|
||||
}
|
||||
waitErr = <-waitCh
|
||||
break waitLoop
|
||||
for {
|
||||
select {
|
||||
case err := <-waitCh:
|
||||
waitErr = err
|
||||
break waitLoop
|
||||
case <-time.After(forceKillWaitTimeout):
|
||||
if proc := cmd.Process(); proc != nil {
|
||||
_ = proc.Kill()
|
||||
}
|
||||
}
|
||||
}
|
||||
case <-messageTimerCh:
|
||||
forcedAfterComplete = true
|
||||
messageTimerCh = nil
|
||||
@@ -1069,6 +1252,20 @@ waitLoop:
|
||||
terminated = true
|
||||
}
|
||||
}
|
||||
// Close pipes to unblock stream readers, then wait for process exit.
|
||||
closeWithReason(stdout, "terminate")
|
||||
closeWithReason(stderr, "terminate")
|
||||
for {
|
||||
select {
|
||||
case err := <-waitCh:
|
||||
waitErr = err
|
||||
break waitLoop
|
||||
case <-time.After(forceKillWaitTimeout):
|
||||
if proc := cmd.Process(); proc != nil {
|
||||
_ = proc.Kill()
|
||||
}
|
||||
}
|
||||
}
|
||||
case <-completeSeen:
|
||||
completeSeenObserved = true
|
||||
if messageTimer != nil {
|
||||
@@ -1110,11 +1307,9 @@ waitLoop:
|
||||
case parsed = <-parseCh:
|
||||
closeWithReason(stdout, stdoutCloseReasonWait)
|
||||
case <-messageSeen:
|
||||
messageSeenObserved = true
|
||||
closeWithReason(stdout, stdoutCloseReasonWait)
|
||||
parsed = <-parseCh
|
||||
case <-completeSeen:
|
||||
completeSeenObserved = true
|
||||
closeWithReason(stdout, stdoutCloseReasonWait)
|
||||
parsed = <-parseCh
|
||||
case <-drainTimer.C:
|
||||
@@ -1123,6 +1318,12 @@ waitLoop:
|
||||
}
|
||||
}
|
||||
|
||||
closeWithReason(stderr, stdoutCloseReasonWait)
|
||||
// Wait for stderr drain so stderrBuf / stderrLogger are not accessed concurrently.
|
||||
// Important: cmd.Wait can block on internal stderr copying if cmd.Stderr is a non-file writer.
|
||||
// We use StderrPipe and drain ourselves to avoid that deadlock class (common when children inherit pipes).
|
||||
<-stderrDone
|
||||
|
||||
if ctxErr := ctx.Err(); ctxErr != nil {
|
||||
if errors.Is(ctxErr, context.DeadlineExceeded) {
|
||||
result.ExitCode = 124
|
||||
@@ -1178,44 +1379,13 @@ waitLoop:
|
||||
return result
|
||||
}
|
||||
|
||||
func forwardSignals(ctx context.Context, cmd commandRunner, logErrorFn func(string)) {
|
||||
notify := signalNotifyFn
|
||||
stop := signalStopFn
|
||||
if notify == nil {
|
||||
notify = signal.Notify
|
||||
}
|
||||
if stop == nil {
|
||||
stop = signal.Stop
|
||||
}
|
||||
|
||||
sigCh := make(chan os.Signal, 1)
|
||||
notify(sigCh, syscall.SIGINT, syscall.SIGTERM)
|
||||
|
||||
go func() {
|
||||
defer stop(sigCh)
|
||||
select {
|
||||
case sig := <-sigCh:
|
||||
logErrorFn(fmt.Sprintf("Received signal: %v", sig))
|
||||
if proc := cmd.Process(); proc != nil {
|
||||
_ = proc.Signal(syscall.SIGTERM)
|
||||
time.AfterFunc(time.Duration(forceKillDelay.Load())*time.Second, func() {
|
||||
if p := cmd.Process(); p != nil {
|
||||
_ = p.Kill()
|
||||
}
|
||||
})
|
||||
}
|
||||
case <-ctx.Done():
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
func cancelReason(commandName string, ctx context.Context) string {
|
||||
if ctx == nil {
|
||||
return "Context cancelled"
|
||||
}
|
||||
|
||||
if commandName == "" {
|
||||
commandName = codexCommand
|
||||
commandName = defaultBackendName
|
||||
}
|
||||
|
||||
if errors.Is(ctx.Err(), context.DeadlineExceeded) {
|
||||
@@ -1267,7 +1437,7 @@ func terminateCommand(cmd commandRunner) *forceKillTimer {
|
||||
return nil
|
||||
}
|
||||
|
||||
_ = proc.Signal(syscall.SIGTERM)
|
||||
_ = sendTermSignal(proc)
|
||||
|
||||
done := make(chan struct{}, 1)
|
||||
timer := time.AfterFunc(time.Duration(forceKillDelay.Load())*time.Second, func() {
|
||||
@@ -1279,21 +1449,3 @@ func terminateCommand(cmd commandRunner) *forceKillTimer {
|
||||
|
||||
return &forceKillTimer{timer: timer, done: done}
|
||||
}
|
||||
|
||||
func terminateProcess(cmd commandRunner) *time.Timer {
|
||||
if cmd == nil {
|
||||
return nil
|
||||
}
|
||||
proc := cmd.Process()
|
||||
if proc == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
_ = proc.Signal(syscall.SIGTERM)
|
||||
|
||||
return time.AfterFunc(time.Duration(forceKillDelay.Load())*time.Second, func() {
|
||||
if p := cmd.Process(); p != nil {
|
||||
_ = p.Kill()
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
package main
|
||||
package executor
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
@@ -18,6 +18,12 @@ var geminiNoisePatterns = []string{
|
||||
"YOLO mode is enabled",
|
||||
}
|
||||
|
||||
// codexNoisePatterns contains stderr patterns to filter for codex backend
|
||||
var codexNoisePatterns = []string{
|
||||
"ERROR codex_core::codex: needs_follow_up:",
|
||||
"ERROR codex_core::skills::loader:",
|
||||
}
|
||||
|
||||
// filteringWriter wraps an io.Writer and filters out lines matching patterns
|
||||
type filteringWriter struct {
|
||||
w io.Writer
|
||||
@@ -39,7 +45,7 @@ func (f *filteringWriter) Write(p []byte) (n int, err error) {
|
||||
break
|
||||
}
|
||||
if !f.shouldFilter(line) {
|
||||
f.w.Write([]byte(line))
|
||||
_, _ = f.w.Write([]byte(line))
|
||||
}
|
||||
}
|
||||
return len(p), nil
|
||||
@@ -59,7 +65,7 @@ func (f *filteringWriter) Flush() {
|
||||
if f.buf.Len() > 0 {
|
||||
remaining := f.buf.String()
|
||||
if !f.shouldFilter(remaining) {
|
||||
f.w.Write([]byte(remaining))
|
||||
_, _ = f.w.Write([]byte(remaining))
|
||||
}
|
||||
f.buf.Reset()
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
package main
|
||||
package executor
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
@@ -48,7 +48,7 @@ func TestFilteringWriter(t *testing.T) {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
fw := newFilteringWriter(&buf, tt.patterns)
|
||||
fw.Write([]byte(tt.input))
|
||||
_, _ = fw.Write([]byte(tt.input))
|
||||
fw.Flush()
|
||||
|
||||
if got := buf.String(); got != tt.want {
|
||||
@@ -63,8 +63,8 @@ func TestFilteringWriterPartialLines(t *testing.T) {
|
||||
fw := newFilteringWriter(&buf, geminiNoisePatterns)
|
||||
|
||||
// Write partial line
|
||||
fw.Write([]byte("Hello "))
|
||||
fw.Write([]byte("World\n"))
|
||||
_, _ = fw.Write([]byte("Hello "))
|
||||
_, _ = fw.Write([]byte("World\n"))
|
||||
fw.Flush()
|
||||
|
||||
if got := buf.String(); got != "Hello World\n" {
|
||||
124
codeagent-wrapper/internal/executor/log_helpers.go
Normal file
@@ -0,0 +1,124 @@
|
||||
package executor
|
||||
|
||||
import "bytes"
|
||||
|
||||
type logWriter struct {
|
||||
prefix string
|
||||
maxLen int
|
||||
buf bytes.Buffer
|
||||
dropped bool
|
||||
}
|
||||
|
||||
func newLogWriter(prefix string, maxLen int) *logWriter {
|
||||
if maxLen <= 0 {
|
||||
maxLen = codexLogLineLimit
|
||||
}
|
||||
return &logWriter{prefix: prefix, maxLen: maxLen}
|
||||
}
|
||||
|
||||
func (lw *logWriter) Write(p []byte) (int, error) {
|
||||
if lw == nil {
|
||||
return len(p), nil
|
||||
}
|
||||
total := len(p)
|
||||
for len(p) > 0 {
|
||||
if idx := bytes.IndexByte(p, '\n'); idx >= 0 {
|
||||
lw.writeLimited(p[:idx])
|
||||
lw.logLine(true)
|
||||
p = p[idx+1:]
|
||||
continue
|
||||
}
|
||||
lw.writeLimited(p)
|
||||
break
|
||||
}
|
||||
return total, nil
|
||||
}
|
||||
|
||||
func (lw *logWriter) Flush() {
|
||||
if lw == nil || lw.buf.Len() == 0 {
|
||||
return
|
||||
}
|
||||
lw.logLine(false)
|
||||
}
|
||||
|
||||
func (lw *logWriter) logLine(force bool) {
|
||||
if lw == nil {
|
||||
return
|
||||
}
|
||||
line := lw.buf.String()
|
||||
dropped := lw.dropped
|
||||
lw.dropped = false
|
||||
lw.buf.Reset()
|
||||
if line == "" && !force {
|
||||
return
|
||||
}
|
||||
if lw.maxLen > 0 {
|
||||
if dropped {
|
||||
if lw.maxLen > 3 {
|
||||
line = line[:min(len(line), lw.maxLen-3)] + "..."
|
||||
} else {
|
||||
line = line[:min(len(line), lw.maxLen)]
|
||||
}
|
||||
} else if len(line) > lw.maxLen {
|
||||
cutoff := lw.maxLen
|
||||
if cutoff > 3 {
|
||||
line = line[:cutoff-3] + "..."
|
||||
} else {
|
||||
line = line[:cutoff]
|
||||
}
|
||||
}
|
||||
}
|
||||
logInfo(lw.prefix + line)
|
||||
}
|
||||
|
||||
func (lw *logWriter) writeLimited(p []byte) {
|
||||
if lw == nil || len(p) == 0 {
|
||||
return
|
||||
}
|
||||
if lw.maxLen <= 0 {
|
||||
lw.buf.Write(p)
|
||||
return
|
||||
}
|
||||
|
||||
remaining := lw.maxLen - lw.buf.Len()
|
||||
if remaining <= 0 {
|
||||
lw.dropped = true
|
||||
return
|
||||
}
|
||||
if len(p) <= remaining {
|
||||
lw.buf.Write(p)
|
||||
return
|
||||
}
|
||||
lw.buf.Write(p[:remaining])
|
||||
lw.dropped = true
|
||||
}
|
||||
|
||||
type tailBuffer struct {
|
||||
limit int
|
||||
data []byte
|
||||
}
|
||||
|
||||
func (b *tailBuffer) Write(p []byte) (int, error) {
|
||||
if b.limit <= 0 {
|
||||
return len(p), nil
|
||||
}
|
||||
|
||||
if len(p) >= b.limit {
|
||||
b.data = append(b.data[:0], p[len(p)-b.limit:]...)
|
||||
return len(p), nil
|
||||
}
|
||||
|
||||
total := len(b.data) + len(p)
|
||||
if total <= b.limit {
|
||||
b.data = append(b.data, p...)
|
||||
return len(p), nil
|
||||
}
|
||||
|
||||
overflow := total - b.limit
|
||||
b.data = append(b.data[overflow:], p...)
|
||||
return len(p), nil
|
||||
}
|
||||
|
||||
func (b *tailBuffer) String() string {
|
||||
return string(b.data)
|
||||
}
|
||||
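For orientation, a minimal sketch of how tailBuffer behaves, assuming it is used inside this package; the values are illustrative and were traced against the Write logic above (it retains only the trailing bytes up to its limit):

// Sketch: tailBuffer keeps only the last `limit` bytes written to it.
tb := &tailBuffer{limit: 8}
_, _ = tb.Write([]byte("hello "))
_, _ = tb.Write([]byte("world!!!"))
// tb.String() == "world!!!" – the earlier bytes were discarded.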
@@ -1,4 +1,4 @@
|
||||
package main
|
||||
package executor
|
||||
|
||||
import (
|
||||
"os"
|
||||
@@ -7,14 +7,12 @@ import (
|
||||
)
|
||||
|
||||
func TestLogWriterWriteLimitsBuffer(t *testing.T) {
|
||||
defer resetTestHooks()
|
||||
|
||||
logger, err := NewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("NewLogger error: %v", err)
|
||||
}
|
||||
setLogger(logger)
|
||||
defer closeLogger()
|
||||
t.Cleanup(func() { _ = closeLogger() })
|
||||
|
||||
lw := newLogWriter("P:", 10)
|
||||
_, _ = lw.Write([]byte(strings.Repeat("a", 100)))
|
||||
@@ -36,4 +34,3 @@ func TestLogWriterWriteLimitsBuffer(t *testing.T) {
|
||||
t.Fatalf("log output missing truncated entry, got %q", string(data))
|
||||
}
|
||||
}
|
||||
|
||||
135
codeagent-wrapper/internal/executor/parallel_config.go
Normal file
@@ -0,0 +1,135 @@
|
||||
package executor
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
config "codeagent-wrapper/internal/config"
|
||||
)
|
||||
|
||||
func ParseParallelConfig(data []byte) (*ParallelConfig, error) {
|
||||
trimmed := bytes.TrimSpace(data)
|
||||
if len(trimmed) == 0 {
|
||||
return nil, fmt.Errorf("parallel config is empty")
|
||||
}
|
||||
|
||||
tasks := strings.Split(string(trimmed), "---TASK---")
|
||||
var cfg ParallelConfig
|
||||
seen := make(map[string]struct{})
|
||||
|
||||
taskIndex := 0
|
||||
for _, taskBlock := range tasks {
|
||||
taskBlock = strings.TrimSpace(taskBlock)
|
||||
if taskBlock == "" {
|
||||
continue
|
||||
}
|
||||
taskIndex++
|
||||
|
||||
parts := strings.SplitN(taskBlock, "---CONTENT---", 2)
|
||||
if len(parts) != 2 {
|
||||
return nil, fmt.Errorf("task block #%d missing ---CONTENT--- separator", taskIndex)
|
||||
}
|
||||
|
||||
meta := strings.TrimSpace(parts[0])
|
||||
content := strings.TrimSpace(parts[1])
|
||||
|
||||
task := TaskSpec{WorkDir: defaultWorkdir}
|
||||
agentSpecified := false
|
||||
for _, line := range strings.Split(meta, "\n") {
|
||||
line = strings.TrimSpace(line)
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
kv := strings.SplitN(line, ":", 2)
|
||||
if len(kv) != 2 {
|
||||
continue
|
||||
}
|
||||
key := strings.TrimSpace(kv[0])
|
||||
value := strings.TrimSpace(kv[1])
|
||||
|
||||
switch key {
|
||||
case "id":
|
||||
task.ID = value
|
||||
case "workdir":
|
||||
// Validate workdir: "-" is not a valid directory
|
||||
if value == "-" {
|
||||
return nil, fmt.Errorf("task block #%d has invalid workdir: '-' is not a valid directory path", taskIndex)
|
||||
}
|
||||
task.WorkDir = value
|
||||
case "session_id":
|
||||
task.SessionID = value
|
||||
task.Mode = "resume"
|
||||
case "backend":
|
||||
task.Backend = value
|
||||
case "model":
|
||||
task.Model = value
|
||||
case "reasoning_effort":
|
||||
task.ReasoningEffort = value
|
||||
case "agent":
|
||||
agentSpecified = true
|
||||
task.Agent = value
|
||||
case "skip_permissions", "skip-permissions":
|
||||
if value == "" {
|
||||
task.SkipPermissions = true
|
||||
continue
|
||||
}
|
||||
task.SkipPermissions = config.ParseBoolFlag(value, false)
|
||||
case "dependencies":
|
||||
for _, dep := range strings.Split(value, ",") {
|
||||
dep = strings.TrimSpace(dep)
|
||||
if dep != "" {
|
||||
task.Dependencies = append(task.Dependencies, dep)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if task.Mode == "" {
|
||||
task.Mode = "new"
|
||||
}
|
||||
|
||||
if agentSpecified {
|
||||
if strings.TrimSpace(task.Agent) == "" {
|
||||
return nil, fmt.Errorf("task block #%d has empty agent field", taskIndex)
|
||||
}
|
||||
if err := config.ValidateAgentName(task.Agent); err != nil {
|
||||
return nil, fmt.Errorf("task block #%d invalid agent name: %w", taskIndex, err)
|
||||
}
|
||||
backend, model, promptFile, reasoning, _, _, _ := config.ResolveAgentConfig(task.Agent)
|
||||
if task.Backend == "" {
|
||||
task.Backend = backend
|
||||
}
|
||||
if task.Model == "" {
|
||||
task.Model = model
|
||||
}
|
||||
if task.ReasoningEffort == "" {
|
||||
task.ReasoningEffort = reasoning
|
||||
}
|
||||
task.PromptFile = promptFile
|
||||
}
|
||||
|
||||
if task.ID == "" {
|
||||
return nil, fmt.Errorf("task block #%d missing id field", taskIndex)
|
||||
}
|
||||
if content == "" {
|
||||
return nil, fmt.Errorf("task block #%d (%q) missing content", taskIndex, task.ID)
|
||||
}
|
||||
if task.Mode == "resume" && strings.TrimSpace(task.SessionID) == "" {
|
||||
return nil, fmt.Errorf("task block #%d (%q) has empty session_id", taskIndex, task.ID)
|
||||
}
|
||||
if _, exists := seen[task.ID]; exists {
|
||||
return nil, fmt.Errorf("task block #%d has duplicate id: %s", taskIndex, task.ID)
|
||||
}
|
||||
|
||||
task.Task = content
|
||||
cfg.Tasks = append(cfg.Tasks, task)
|
||||
seen[task.ID] = struct{}{}
|
||||
}
|
||||
|
||||
if len(cfg.Tasks) == 0 {
|
||||
return nil, fmt.Errorf("no tasks found")
|
||||
}
|
||||
|
||||
return &cfg, nil
|
||||
}
|
||||
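For reference, a minimal sketch of the wire format ParseParallelConfig accepts, derived from the parsing logic above; the IDs and task text are made up. Metadata lines come first, ---CONTENT--- starts the task body, and ---TASK--- separates task blocks:

raw := []byte(`id: build
workdir: .
---CONTENT---
Implement the feature.
---TASK---
id: test
dependencies: build
---CONTENT---
Run the test suite.`)

cfg, err := ParseParallelConfig(raw)
if err != nil {
    panic(err)
}
// cfg.Tasks[0].ID == "build"; cfg.Tasks[1].Dependencies == []string{"build"}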
130
codeagent-wrapper/internal/executor/prompt.go
Normal file
@@ -0,0 +1,130 @@
|
||||
package executor
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
func ReadAgentPromptFile(path string, allowOutsideClaudeDir bool) (string, error) {
|
||||
raw := strings.TrimSpace(path)
|
||||
if raw == "" {
|
||||
return "", nil
|
||||
}
|
||||
|
||||
expanded := raw
|
||||
if raw == "~" || strings.HasPrefix(raw, "~/") || strings.HasPrefix(raw, "~\\") {
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
if raw == "~" {
|
||||
expanded = home
|
||||
} else {
|
||||
expanded = home + raw[1:]
|
||||
}
|
||||
}
|
||||
|
||||
absPath, err := filepath.Abs(expanded)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
absPath = filepath.Clean(absPath)
|
||||
|
||||
home, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
if !allowOutsideClaudeDir {
|
||||
return "", err
|
||||
}
|
||||
logWarn(fmt.Sprintf("Failed to resolve home directory for prompt file validation: %v; proceeding without restriction", err))
|
||||
} else {
|
||||
allowedDirs := []string{
|
||||
filepath.Clean(filepath.Join(home, ".claude")),
|
||||
filepath.Clean(filepath.Join(home, ".codeagent", "agents")),
|
||||
}
|
||||
for i := range allowedDirs {
|
||||
allowedAbs, err := filepath.Abs(allowedDirs[i])
|
||||
if err == nil {
|
||||
allowedDirs[i] = filepath.Clean(allowedAbs)
|
||||
}
|
||||
}
|
||||
|
||||
isWithinDir := func(path, dir string) bool {
|
||||
rel, err := filepath.Rel(dir, path)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
rel = filepath.Clean(rel)
|
||||
if rel == "." {
|
||||
return true
|
||||
}
|
||||
if rel == ".." {
|
||||
return false
|
||||
}
|
||||
prefix := ".." + string(os.PathSeparator)
|
||||
return !strings.HasPrefix(rel, prefix)
|
||||
}
|
||||
|
||||
if !allowOutsideClaudeDir {
|
||||
withinAllowed := false
|
||||
for _, dir := range allowedDirs {
|
||||
if isWithinDir(absPath, dir) {
|
||||
withinAllowed = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !withinAllowed {
|
||||
logWarn(fmt.Sprintf("Refusing to read prompt file outside allowed dirs (%s): %s", strings.Join(allowedDirs, ", "), absPath))
|
||||
return "", fmt.Errorf("prompt file must be under ~/.claude or ~/.codeagent/agents")
|
||||
}
|
||||
|
||||
resolvedPath, errPath := filepath.EvalSymlinks(absPath)
|
||||
if errPath == nil {
|
||||
resolvedPath = filepath.Clean(resolvedPath)
|
||||
resolvedAllowed := make([]string, 0, len(allowedDirs))
|
||||
for _, dir := range allowedDirs {
|
||||
resolvedBase, errBase := filepath.EvalSymlinks(dir)
|
||||
if errBase != nil {
|
||||
continue
|
||||
}
|
||||
resolvedAllowed = append(resolvedAllowed, filepath.Clean(resolvedBase))
|
||||
}
|
||||
if len(resolvedAllowed) > 0 {
|
||||
withinResolved := false
|
||||
for _, dir := range resolvedAllowed {
|
||||
if isWithinDir(resolvedPath, dir) {
|
||||
withinResolved = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !withinResolved {
|
||||
logWarn(fmt.Sprintf("Refusing to read prompt file outside allowed dirs (%s) (resolved): %s", strings.Join(resolvedAllowed, ", "), resolvedPath))
|
||||
return "", fmt.Errorf("prompt file must be under ~/.claude or ~/.codeagent/agents")
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
withinAllowed := false
|
||||
for _, dir := range allowedDirs {
|
||||
if isWithinDir(absPath, dir) {
|
||||
withinAllowed = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !withinAllowed {
|
||||
logWarn(fmt.Sprintf("Reading prompt file outside allowed dirs (%s): %s", strings.Join(allowedDirs, ", "), absPath))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
data, err := os.ReadFile(absPath)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return strings.TrimRight(string(data), "\r\n"), nil
|
||||
}
|
||||
|
||||
func WrapTaskWithAgentPrompt(prompt string, task string) string {
|
||||
return "<agent-prompt>\n" + prompt + "\n</agent-prompt>\n\n" + task
|
||||
}
|
||||
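A minimal usage sketch, assuming a prompt file exists under ~/.claude; the file name and task text are illustrative:

// Load an agent prompt from an allowed directory and prepend it to a task.
prompt, err := ReadAgentPromptFile("~/.claude/agents/reviewer.md", false)
if err != nil {
    panic(err)
}
wrapped := WrapTaskWithAgentPrompt(prompt, "Review the latest changes")
// wrapped starts with "<agent-prompt>\n...\n</agent-prompt>\n\n" followed by the task text.
_ = wrapped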
186
codeagent-wrapper/internal/executor/prompt_file_test.go
Normal file
@@ -0,0 +1,186 @@
|
||||
package executor
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestWrapTaskWithAgentPrompt(t *testing.T) {
|
||||
got := WrapTaskWithAgentPrompt("P", "do")
|
||||
want := "<agent-prompt>\nP\n</agent-prompt>\n\ndo"
|
||||
if got != want {
|
||||
t.Fatalf("wrapTaskWithAgentPrompt mismatch:\n got=%q\nwant=%q", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadAgentPromptFile_EmptyPath(t *testing.T) {
|
||||
for _, allowOutside := range []bool{false, true} {
|
||||
got, err := ReadAgentPromptFile(" ", allowOutside)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error (allowOutside=%v): %v", allowOutside, err)
|
||||
}
|
||||
if got != "" {
|
||||
t.Fatalf("expected empty result (allowOutside=%v), got %q", allowOutside, got)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadAgentPromptFile_ExplicitAbsolutePath(t *testing.T) {
|
||||
dir := t.TempDir()
|
||||
path := filepath.Join(dir, "prompt.md")
|
||||
if err := os.WriteFile(path, []byte("LINE1\n"), 0o644); err != nil {
|
||||
t.Fatalf("WriteFile: %v", err)
|
||||
}
|
||||
|
||||
got, err := ReadAgentPromptFile(path, true)
|
||||
if err != nil {
|
||||
t.Fatalf("readAgentPromptFile error: %v", err)
|
||||
}
|
||||
if got != "LINE1" {
|
||||
t.Fatalf("got %q, want %q", got, "LINE1")
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadAgentPromptFile_ExplicitTildeExpansion(t *testing.T) {
|
||||
home := t.TempDir()
|
||||
t.Setenv("HOME", home)
|
||||
t.Setenv("USERPROFILE", home)
|
||||
|
||||
path := filepath.Join(home, "prompt.md")
|
||||
if err := os.WriteFile(path, []byte("P\n"), 0o644); err != nil {
|
||||
t.Fatalf("WriteFile: %v", err)
|
||||
}
|
||||
|
||||
got, err := ReadAgentPromptFile("~/prompt.md", true)
|
||||
if err != nil {
|
||||
t.Fatalf("readAgentPromptFile error: %v", err)
|
||||
}
|
||||
if got != "P" {
|
||||
t.Fatalf("got %q, want %q", got, "P")
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadAgentPromptFile_RestrictedAllowsClaudeDir(t *testing.T) {
|
||||
home := t.TempDir()
|
||||
t.Setenv("HOME", home)
|
||||
t.Setenv("USERPROFILE", home)
|
||||
|
||||
claudeDir := filepath.Join(home, ".claude")
|
||||
if err := os.MkdirAll(claudeDir, 0o755); err != nil {
|
||||
t.Fatalf("MkdirAll: %v", err)
|
||||
}
|
||||
path := filepath.Join(claudeDir, "prompt.md")
|
||||
if err := os.WriteFile(path, []byte("OK\n"), 0o644); err != nil {
|
||||
t.Fatalf("WriteFile: %v", err)
|
||||
}
|
||||
|
||||
got, err := ReadAgentPromptFile("~/.claude/prompt.md", false)
|
||||
if err != nil {
|
||||
t.Fatalf("readAgentPromptFile error: %v", err)
|
||||
}
|
||||
if got != "OK" {
|
||||
t.Fatalf("got %q, want %q", got, "OK")
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadAgentPromptFile_RestrictedAllowsCodeagentAgentsDir(t *testing.T) {
|
||||
home := t.TempDir()
|
||||
t.Setenv("HOME", home)
|
||||
t.Setenv("USERPROFILE", home)
|
||||
|
||||
agentDir := filepath.Join(home, ".codeagent", "agents")
|
||||
if err := os.MkdirAll(agentDir, 0o755); err != nil {
|
||||
t.Fatalf("MkdirAll: %v", err)
|
||||
}
|
||||
path := filepath.Join(agentDir, "sarsh.md")
|
||||
if err := os.WriteFile(path, []byte("OK\n"), 0o644); err != nil {
|
||||
t.Fatalf("WriteFile: %v", err)
|
||||
}
|
||||
|
||||
got, err := ReadAgentPromptFile("~/.codeagent/agents/sarsh.md", false)
|
||||
if err != nil {
|
||||
t.Fatalf("readAgentPromptFile error: %v", err)
|
||||
}
|
||||
if got != "OK" {
|
||||
t.Fatalf("got %q, want %q", got, "OK")
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadAgentPromptFile_RestrictedRejectsOutsideClaudeDir(t *testing.T) {
|
||||
home := t.TempDir()
|
||||
t.Setenv("HOME", home)
|
||||
t.Setenv("USERPROFILE", home)
|
||||
|
||||
path := filepath.Join(home, "prompt.md")
|
||||
if err := os.WriteFile(path, []byte("NO\n"), 0o644); err != nil {
|
||||
t.Fatalf("WriteFile: %v", err)
|
||||
}
|
||||
|
||||
if _, err := ReadAgentPromptFile("~/prompt.md", false); err == nil {
|
||||
t.Fatalf("expected error for prompt file outside ~/.claude, got nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadAgentPromptFile_RestrictedRejectsTraversal(t *testing.T) {
|
||||
home := t.TempDir()
|
||||
t.Setenv("HOME", home)
|
||||
t.Setenv("USERPROFILE", home)
|
||||
|
||||
path := filepath.Join(home, "secret.md")
|
||||
if err := os.WriteFile(path, []byte("SECRET\n"), 0o644); err != nil {
|
||||
t.Fatalf("WriteFile: %v", err)
|
||||
}
|
||||
|
||||
if _, err := ReadAgentPromptFile("~/.claude/../secret.md", false); err == nil {
|
||||
t.Fatalf("expected traversal to be rejected, got nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadAgentPromptFile_NotFound(t *testing.T) {
|
||||
home := t.TempDir()
|
||||
t.Setenv("HOME", home)
|
||||
t.Setenv("USERPROFILE", home)
|
||||
|
||||
claudeDir := filepath.Join(home, ".claude")
|
||||
if err := os.MkdirAll(claudeDir, 0o755); err != nil {
|
||||
t.Fatalf("MkdirAll: %v", err)
|
||||
}
|
||||
|
||||
_, err := ReadAgentPromptFile("~/.claude/missing.md", false)
|
||||
if err == nil || !os.IsNotExist(err) {
|
||||
t.Fatalf("expected not-exist error, got %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadAgentPromptFile_PermissionDenied(t *testing.T) {
|
||||
if runtime.GOOS == "windows" {
|
||||
t.Skip("chmod-based permission test is not reliable on Windows")
|
||||
}
|
||||
|
||||
home := t.TempDir()
|
||||
t.Setenv("HOME", home)
|
||||
t.Setenv("USERPROFILE", home)
|
||||
|
||||
claudeDir := filepath.Join(home, ".claude")
|
||||
if err := os.MkdirAll(claudeDir, 0o755); err != nil {
|
||||
t.Fatalf("MkdirAll: %v", err)
|
||||
}
|
||||
path := filepath.Join(claudeDir, "private.md")
|
||||
if err := os.WriteFile(path, []byte("PRIVATE\n"), 0o600); err != nil {
|
||||
t.Fatalf("WriteFile: %v", err)
|
||||
}
|
||||
if err := os.Chmod(path, 0o000); err != nil {
|
||||
t.Fatalf("Chmod: %v", err)
|
||||
}
|
||||
|
||||
_, err := ReadAgentPromptFile("~/.claude/private.md", false)
|
||||
if err == nil {
|
||||
t.Fatalf("expected permission error, got nil")
|
||||
}
|
||||
if !os.IsPermission(err) && !strings.Contains(strings.ToLower(err.Error()), "permission") {
|
||||
t.Fatalf("expected permission denied, got: %v", err)
|
||||
}
|
||||
}
|
||||
104
codeagent-wrapper/internal/executor/report_helpers.go
Normal file
@@ -0,0 +1,104 @@
|
||||
package executor
|
||||
|
||||
import "strings"
|
||||
|
||||
// extractCoverageGap extracts what's missing from coverage reports.
|
||||
func extractCoverageGap(message string) string {
|
||||
if message == "" {
|
||||
return ""
|
||||
}
|
||||
|
||||
lower := strings.ToLower(message)
|
||||
lines := strings.Split(message, "\n")
|
||||
|
||||
for _, line := range lines {
|
||||
lineLower := strings.ToLower(line)
|
||||
line = strings.TrimSpace(line)
|
||||
|
||||
if strings.Contains(lineLower, "uncovered") ||
|
||||
strings.Contains(lineLower, "not covered") ||
|
||||
strings.Contains(lineLower, "missing coverage") ||
|
||||
strings.Contains(lineLower, "lines not covered") {
|
||||
if len(line) > 100 {
|
||||
return line[:97] + "..."
|
||||
}
|
||||
return line
|
||||
}
|
||||
|
||||
if strings.Contains(lineLower, "branch") && strings.Contains(lineLower, "not taken") {
|
||||
if len(line) > 100 {
|
||||
return line[:97] + "..."
|
||||
}
|
||||
return line
|
||||
}
|
||||
}
|
||||
|
||||
if strings.Contains(lower, "function") && strings.Contains(lower, "0%") {
|
||||
for _, line := range lines {
|
||||
if strings.Contains(strings.ToLower(line), "0%") && strings.Contains(line, "function") {
|
||||
line = strings.TrimSpace(line)
|
||||
if len(line) > 100 {
|
||||
return line[:97] + "..."
|
||||
}
|
||||
return line
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
// extractErrorDetail extracts meaningful error context from task output.
|
||||
func extractErrorDetail(message string, maxLen int) string {
|
||||
if message == "" || maxLen <= 0 {
|
||||
return ""
|
||||
}
|
||||
|
||||
lines := strings.Split(message, "\n")
|
||||
var errorLines []string
|
||||
|
||||
for _, line := range lines {
|
||||
line = strings.TrimSpace(line)
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
lower := strings.ToLower(line)
|
||||
|
||||
if strings.HasPrefix(line, "at ") && strings.Contains(line, "(") {
|
||||
if len(errorLines) > 0 && strings.HasPrefix(strings.ToLower(errorLines[len(errorLines)-1]), "at ") {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
if strings.Contains(lower, "error") ||
|
||||
strings.Contains(lower, "fail") ||
|
||||
strings.Contains(lower, "exception") ||
|
||||
strings.Contains(lower, "assert") ||
|
||||
strings.Contains(lower, "expected") ||
|
||||
strings.Contains(lower, "timeout") ||
|
||||
strings.Contains(lower, "not found") ||
|
||||
strings.Contains(lower, "cannot") ||
|
||||
strings.Contains(lower, "undefined") ||
|
||||
strings.HasPrefix(line, "FAIL") ||
|
||||
strings.HasPrefix(line, "●") {
|
||||
errorLines = append(errorLines, line)
|
||||
}
|
||||
}
|
||||
|
||||
if len(errorLines) == 0 {
|
||||
start := len(lines) - 5
|
||||
if start < 0 {
|
||||
start = 0
|
||||
}
|
||||
for _, line := range lines[start:] {
|
||||
line = strings.TrimSpace(line)
|
||||
if line != "" {
|
||||
errorLines = append(errorLines, line)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result := strings.Join(errorLines, " | ")
|
||||
return safeTruncate(result, maxLen)
|
||||
}
|
||||
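A minimal sketch of what extractErrorDetail is expected to return for noisy output; the sample text is made up, and the exact result depends on safeTruncate when the joined lines exceed maxLen:

out := "compiling...\nFAIL: TestFoo\n  expected 2, got 3\nmore logs"
detail := extractErrorDetail(out, 80)
// detail should be "FAIL: TestFoo | expected 2, got 3" – only failure-looking lines are kept.
_ = detail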
16
codeagent-wrapper/internal/executor/signal_unix.go
Normal file
@@ -0,0 +1,16 @@
//go:build unix || darwin || linux
// +build unix darwin linux

package executor

import (
    "syscall"
)

// sendTermSignal sends SIGTERM for graceful shutdown on Unix.
func sendTermSignal(proc processHandle) error {
    if proc == nil {
        return nil
    }
    return proc.Signal(syscall.SIGTERM)
}
87
codeagent-wrapper/internal/executor/signal_windows.go
Normal file
@@ -0,0 +1,87 @@
|
||||
//go:build windows
|
||||
// +build windows
|
||||
|
||||
package executor
|
||||
|
||||
import (
|
||||
"io"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// sendTermSignal on Windows terminates the whole process tree (taskkill, then WMIC),
// falling back to Kill; SIGTERM is not supported on Windows.
|
||||
func sendTermSignal(proc processHandle) error {
|
||||
if proc == nil {
|
||||
return nil
|
||||
}
|
||||
pid := proc.Pid()
|
||||
if pid > 0 {
|
||||
// Kill the whole process tree to avoid leaving inheriting child processes around.
|
||||
// This also helps prevent exec.Cmd.Wait() from blocking on stderr/stdout pipes held open by children.
|
||||
taskkill := "taskkill"
|
||||
if root := os.Getenv("SystemRoot"); root != "" {
|
||||
taskkill = filepath.Join(root, "System32", "taskkill.exe")
|
||||
}
|
||||
cmd := exec.Command(taskkill, "/PID", strconv.Itoa(pid), "/T", "/F")
|
||||
cmd.Stdout = io.Discard
|
||||
cmd.Stderr = io.Discard
|
||||
if err := cmd.Run(); err == nil {
|
||||
return nil
|
||||
}
|
||||
if err := killProcessTree(pid); err == nil {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
return proc.Kill()
|
||||
}
|
||||
|
||||
func killProcessTree(pid int) error {
|
||||
if pid <= 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
wmic := "wmic"
|
||||
if root := os.Getenv("SystemRoot"); root != "" {
|
||||
wmic = filepath.Join(root, "System32", "wbem", "WMIC.exe")
|
||||
}
|
||||
|
||||
queryChildren := "(ParentProcessId=" + strconv.Itoa(pid) + ")"
|
||||
listCmd := exec.Command(wmic, "process", "where", queryChildren, "get", "ProcessId", "/VALUE")
|
||||
listCmd.Stderr = io.Discard
|
||||
out, err := listCmd.Output()
|
||||
if err == nil {
|
||||
for _, childPID := range parseWMICPIDs(out) {
|
||||
_ = killProcessTree(childPID)
|
||||
}
|
||||
}
|
||||
|
||||
querySelf := "(ProcessId=" + strconv.Itoa(pid) + ")"
|
||||
termCmd := exec.Command(wmic, "process", "where", querySelf, "call", "terminate")
|
||||
termCmd.Stdout = io.Discard
|
||||
termCmd.Stderr = io.Discard
|
||||
if termErr := termCmd.Run(); termErr != nil && err == nil {
|
||||
err = termErr
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func parseWMICPIDs(out []byte) []int {
|
||||
const prefix = "ProcessId="
|
||||
var pids []int
|
||||
for _, line := range strings.Split(string(out), "\n") {
|
||||
line = strings.TrimSpace(line)
|
||||
if !strings.HasPrefix(line, prefix) {
|
||||
continue
|
||||
}
|
||||
n, err := strconv.Atoi(strings.TrimSpace(strings.TrimPrefix(line, prefix)))
|
||||
if err != nil || n <= 0 {
|
||||
continue
|
||||
}
|
||||
pids = append(pids, n)
|
||||
}
|
||||
return pids
|
||||
}
|
||||
15
codeagent-wrapper/internal/executor/stdin.go
Normal file
@@ -0,0 +1,15 @@
package executor

import "strings"

const stdinSpecialChars = "\n\\\"'`$"

func ShouldUseStdin(taskText string, piped bool) bool {
    if piped {
        return true
    }
    if len(taskText) > 800 {
        return true
    }
    return strings.ContainsAny(taskText, stdinSpecialChars)
}
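Usage sketch (assuming strings is imported; the inputs are illustrative) – tasks that are long or contain shell-sensitive characters go via stdin rather than argv:

_ = ShouldUseStdin("echo $HOME", false)              // true: contains '$'
_ = ShouldUseStdin("short task", false)              // false
_ = ShouldUseStdin(strings.Repeat("x", 1000), false) // true: longer than 800 chars
_ = ShouldUseStdin("anything", true)                 // true: input was piped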
46
codeagent-wrapper/internal/executor/task_types.go
Normal file
@@ -0,0 +1,46 @@
package executor

import "context"

// ParallelConfig defines the JSON schema for parallel execution.
type ParallelConfig struct {
    Tasks         []TaskSpec `json:"tasks"`
    GlobalBackend string     `json:"backend,omitempty"`
}

// TaskSpec describes an individual task entry in the parallel config.
type TaskSpec struct {
    ID              string          `json:"id"`
    Task            string          `json:"task"`
    WorkDir         string          `json:"workdir,omitempty"`
    Dependencies    []string        `json:"dependencies,omitempty"`
    SessionID       string          `json:"session_id,omitempty"`
    Backend         string          `json:"backend,omitempty"`
    Model           string          `json:"model,omitempty"`
    ReasoningEffort string          `json:"reasoning_effort,omitempty"`
    Agent           string          `json:"agent,omitempty"`
    PromptFile      string          `json:"prompt_file,omitempty"`
    SkipPermissions bool            `json:"skip_permissions,omitempty"`
    Mode            string          `json:"-"`
    UseStdin        bool            `json:"-"`
    Context         context.Context `json:"-"`
}

// TaskResult captures the execution outcome of a task.
type TaskResult struct {
    TaskID    string `json:"task_id"`
    ExitCode  int    `json:"exit_code"`
    Message   string `json:"message"`
    SessionID string `json:"session_id"`
    Error     string `json:"error"`
    LogPath   string `json:"log_path"`
    // Structured report fields
    Coverage       string   `json:"coverage,omitempty"`        // extracted coverage percentage (e.g., "92%")
    CoverageNum    float64  `json:"coverage_num,omitempty"`    // numeric coverage for comparison
    CoverageTarget float64  `json:"coverage_target,omitempty"` // target coverage (default 90)
    FilesChanged   []string `json:"files_changed,omitempty"`   // list of changed files
    KeyOutput      string   `json:"key_output,omitempty"`      // brief summary of what was done
    TestsPassed    int      `json:"tests_passed,omitempty"`    // number of tests passed
    TestsFailed    int      `json:"tests_failed,omitempty"`    // number of tests failed
    sharedLog bool
}
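A minimal sketch of how these types flow through the scheduling entry points changed above; the task text and timeout value are illustrative, and passing a nil runTask falls back to DefaultRunCodexTaskFn:

tasks := []TaskSpec{
    {ID: "a", Task: "build the package"},
    {ID: "b", Task: "run the tests", Dependencies: []string{"a"}},
    {ID: "c", Task: "lint the code"},
}
layers, err := TopologicalSort(tasks)
if err != nil {
    panic(err)
}
// "a" and "c" have no dependencies, so they should share the first layer; "b" follows.
results := ExecuteConcurrent(layers, 300, nil) // nil runTask falls back to DefaultRunCodexTaskFn
summary := GenerateFinalOutput(results)
_ = summary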
57
codeagent-wrapper/internal/executor/testhooks.go
Normal file
@@ -0,0 +1,57 @@
|
||||
package executor
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os/exec"
|
||||
|
||||
backend "codeagent-wrapper/internal/backend"
|
||||
)
|
||||
|
||||
type CommandRunner = commandRunner
|
||||
type ProcessHandle = processHandle
|
||||
|
||||
func SetForceKillDelay(seconds int32) (restore func()) {
|
||||
prev := forceKillDelay.Load()
|
||||
forceKillDelay.Store(seconds)
|
||||
return func() { forceKillDelay.Store(prev) }
|
||||
}
|
||||
|
||||
func SetSelectBackendFn(fn func(string) (Backend, error)) (restore func()) {
|
||||
prev := selectBackendFn
|
||||
if fn != nil {
|
||||
selectBackendFn = fn
|
||||
} else {
|
||||
selectBackendFn = backend.Select
|
||||
}
|
||||
return func() { selectBackendFn = prev }
|
||||
}
|
||||
|
||||
func SetCommandContextFn(fn func(context.Context, string, ...string) *exec.Cmd) (restore func()) {
|
||||
prev := commandContext
|
||||
if fn != nil {
|
||||
commandContext = fn
|
||||
} else {
|
||||
commandContext = exec.CommandContext
|
||||
}
|
||||
return func() { commandContext = prev }
|
||||
}
|
||||
|
||||
func SetNewCommandRunner(fn func(context.Context, string, ...string) CommandRunner) (restore func()) {
|
||||
prev := newCommandRunner
|
||||
if fn != nil {
|
||||
newCommandRunner = fn
|
||||
} else {
|
||||
newCommandRunner = func(ctx context.Context, name string, args ...string) commandRunner {
|
||||
return &realCmd{cmd: commandContext(ctx, name, args...)}
|
||||
}
|
||||
}
|
||||
return func() { newCommandRunner = prev }
|
||||
}
|
||||
|
||||
func WithTaskLogger(ctx context.Context, logger *Logger) context.Context {
|
||||
return withTaskLogger(ctx, logger)
|
||||
}
|
||||
|
||||
func TaskLoggerFromContext(ctx context.Context) *Logger {
|
||||
return taskLoggerFromContext(ctx)
|
||||
}
|
||||
59
codeagent-wrapper/internal/logger/active.go
Normal file
@@ -0,0 +1,59 @@
|
||||
package logger
|
||||
|
||||
import "sync/atomic"
|
||||
|
||||
var loggerPtr atomic.Pointer[Logger]
|
||||
|
||||
func setLogger(l *Logger) {
|
||||
loggerPtr.Store(l)
|
||||
}
|
||||
|
||||
func closeLogger() error {
|
||||
logger := loggerPtr.Swap(nil)
|
||||
if logger == nil {
|
||||
return nil
|
||||
}
|
||||
return logger.Close()
|
||||
}
|
||||
|
||||
func activeLogger() *Logger {
|
||||
return loggerPtr.Load()
|
||||
}
|
||||
|
||||
func logDebug(msg string) {
|
||||
if logger := activeLogger(); logger != nil {
|
||||
logger.Debug(msg)
|
||||
}
|
||||
}
|
||||
|
||||
func logInfo(msg string) {
|
||||
if logger := activeLogger(); logger != nil {
|
||||
logger.Info(msg)
|
||||
}
|
||||
}
|
||||
|
||||
func logWarn(msg string) {
|
||||
if logger := activeLogger(); logger != nil {
|
||||
logger.Warn(msg)
|
||||
}
|
||||
}
|
||||
|
||||
func logError(msg string) {
|
||||
if logger := activeLogger(); logger != nil {
|
||||
logger.Error(msg)
|
||||
}
|
||||
}
|
||||
|
||||
func SetLogger(l *Logger) { setLogger(l) }
|
||||
|
||||
func CloseLogger() error { return closeLogger() }
|
||||
|
||||
func ActiveLogger() *Logger { return activeLogger() }
|
||||
|
||||
func LogInfo(msg string) { logInfo(msg) }
|
||||
|
||||
func LogDebug(msg string) { logDebug(msg) }
|
||||
|
||||
func LogWarn(msg string) { logWarn(msg) }
|
||||
|
||||
func LogError(msg string) { logError(msg) }
|
||||
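Usage sketch: install a process-wide logger so the package-level helpers write somewhere; until one is set they are no-ops. The message text is illustrative:

l, err := NewLogger()
if err != nil {
    panic(err)
}
SetLogger(l)
defer func() { _ = CloseLogger() }()
LogInfo("worker started")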
@@ -1,4 +1,4 @@
|
||||
package main
|
||||
package logger
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
@@ -13,6 +13,8 @@ import (
|
||||
"sync"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/rs/zerolog"
|
||||
)
|
||||
|
||||
// Logger writes log messages asynchronously to a temp file.
|
||||
@@ -22,6 +24,7 @@ type Logger struct {
|
||||
path string
|
||||
file *os.File
|
||||
writer *bufio.Writer
|
||||
zlogger zerolog.Logger
|
||||
ch chan logEntry
|
||||
flushReq chan chan struct{}
|
||||
done chan struct{}
|
||||
@@ -37,6 +40,7 @@ type Logger struct {
|
||||
|
||||
type logEntry struct {
|
||||
msg string
|
||||
level zerolog.Level
|
||||
isError bool // true for ERROR or WARN levels
|
||||
}
|
||||
|
||||
@@ -73,7 +77,7 @@ func NewLogger() (*Logger, error) {
|
||||
// Useful for tests that need isolated log files within the same process.
|
||||
func NewLoggerWithSuffix(suffix string) (*Logger, error) {
|
||||
pid := os.Getpid()
|
||||
filename := fmt.Sprintf("%s-%d", primaryLogPrefix(), pid)
|
||||
filename := fmt.Sprintf("%s-%d", PrimaryLogPrefix(), pid)
|
||||
var safeSuffix string
|
||||
if suffix != "" {
|
||||
safeSuffix = sanitizeLogSuffix(suffix)
|
||||
@@ -103,6 +107,8 @@ func NewLoggerWithSuffix(suffix string) (*Logger, error) {
|
||||
done: make(chan struct{}),
|
||||
}
|
||||
|
||||
l.zlogger = zerolog.New(l.writer).With().Timestamp().Logger()
|
||||
|
||||
l.workerWG.Add(1)
|
||||
go l.run()
|
||||
|
||||
@@ -184,17 +190,24 @@ func (l *Logger) Path() string {
|
||||
return l.path
|
||||
}
|
||||
|
||||
func (l *Logger) IsClosed() bool {
|
||||
if l == nil {
|
||||
return true
|
||||
}
|
||||
return l.closed.Load()
|
||||
}
|
||||
|
||||
// Info logs at INFO level.
|
||||
func (l *Logger) Info(msg string) { l.log("INFO", msg) }
|
||||
func (l *Logger) Info(msg string) { l.logWithLevel(zerolog.InfoLevel, msg) }
|
||||
|
||||
// Warn logs at WARN level.
|
||||
func (l *Logger) Warn(msg string) { l.log("WARN", msg) }
|
||||
func (l *Logger) Warn(msg string) { l.logWithLevel(zerolog.WarnLevel, msg) }
|
||||
|
||||
// Debug logs at DEBUG level.
|
||||
func (l *Logger) Debug(msg string) { l.log("DEBUG", msg) }
|
||||
func (l *Logger) Debug(msg string) { l.logWithLevel(zerolog.DebugLevel, msg) }
|
||||
|
||||
// Error logs at ERROR level.
|
||||
func (l *Logger) Error(msg string) { l.log("ERROR", msg) }
|
||||
func (l *Logger) Error(msg string) { l.logWithLevel(zerolog.ErrorLevel, msg) }
|
||||
|
||||
// Close signals the worker to flush and close the log file.
|
||||
// The log file is NOT removed, allowing inspection after program exit.
|
||||
@@ -335,7 +348,7 @@ func (l *Logger) Flush() {
|
||||
}
|
||||
}
|
||||
|
||||
func (l *Logger) log(level, msg string) {
|
||||
func (l *Logger) logWithLevel(entryLevel zerolog.Level, msg string) {
|
||||
if l == nil {
|
||||
return
|
||||
}
|
||||
@@ -343,8 +356,8 @@ func (l *Logger) log(level, msg string) {
|
||||
return
|
||||
}
|
||||
|
||||
isError := level == "WARN" || level == "ERROR"
|
||||
entry := logEntry{msg: msg, isError: isError}
|
||||
isError := entryLevel == zerolog.WarnLevel || entryLevel == zerolog.ErrorLevel
|
||||
entry := logEntry{msg: msg, level: entryLevel, isError: isError}
|
||||
l.flushMu.Lock()
|
||||
l.pendingWG.Add(1)
|
||||
l.flushMu.Unlock()
|
||||
@@ -366,8 +379,7 @@ func (l *Logger) run() {
|
||||
defer ticker.Stop()
|
||||
|
||||
writeEntry := func(entry logEntry) {
|
||||
timestamp := time.Now().Format("2006-01-02 15:04:05.000")
|
||||
fmt.Fprintf(l.writer, "[%s] %s\n", timestamp, entry.msg)
|
||||
l.zlogger.WithLevel(entry.level).Msg(entry.msg)
|
||||
|
||||
// Cache error/warn entries in memory for fast extraction
|
||||
if entry.isError {
|
||||
@@ -439,10 +451,7 @@ func cleanupOldLogs() (CleanupStats, error) {
|
||||
var stats CleanupStats
|
||||
tempDir := os.TempDir()
|
||||
|
||||
prefixes := logPrefixes()
|
||||
if len(prefixes) == 0 {
|
||||
prefixes = []string{defaultWrapperName}
|
||||
}
|
||||
prefixes := LogPrefixes()
|
||||
|
||||
seen := make(map[string]struct{})
|
||||
var matches []string
|
||||
@@ -473,7 +482,8 @@ func cleanupOldLogs() (CleanupStats, error) {
|
||||
stats.Kept++
|
||||
stats.KeptFiles = append(stats.KeptFiles, filename)
|
||||
if reason != "" {
|
||||
logWarn(fmt.Sprintf("cleanupOldLogs: skipping %s: %s", filename, reason))
|
||||
// Use Debug level to avoid polluting Recent Errors with cleanup noise
|
||||
logDebug(fmt.Sprintf("cleanupOldLogs: skipping %s: %s", filename, reason))
|
||||
}
|
||||
continue
|
||||
}
|
||||
@@ -591,10 +601,7 @@ func isPIDReused(logPath string, pid int) bool {
|
||||
if procStartTime.IsZero() {
|
||||
// Can't determine process start time
|
||||
// Check if file is very old (>7 days), likely from a dead process
|
||||
if time.Since(fileModTime) > 7*24*time.Hour {
|
||||
return true // File is old enough to be from a different process
|
||||
}
|
||||
return false // Be conservative for recent files
|
||||
return time.Since(fileModTime) > 7*24*time.Hour
|
||||
}
|
||||
|
||||
// If the log file was modified before the process started, PID was reused
|
||||
@@ -604,10 +611,7 @@ func isPIDReused(logPath string, pid int) bool {
|
||||
|
||||
func parsePIDFromLog(path string) (int, bool) {
|
||||
name := filepath.Base(path)
|
||||
prefixes := logPrefixes()
|
||||
if len(prefixes) == 0 {
|
||||
prefixes = []string{defaultWrapperName}
|
||||
}
|
||||
prefixes := LogPrefixes()
|
||||
|
||||
for _, prefix := range prefixes {
|
||||
prefixWithDash := fmt.Sprintf("%s-", prefix)
|
||||
@@ -661,3 +665,19 @@ func renderWorkerLimit(limit int) string {
|
||||
}
|
||||
return strconv.Itoa(limit)
|
||||
}
|
||||
|
||||
func CleanupOldLogs() (CleanupStats, error) { return cleanupOldLogs() }
|
||||
|
||||
func IsUnsafeFile(path string, tempDir string) (bool, string) { return isUnsafeFile(path, tempDir) }
|
||||
|
||||
func IsPIDReused(logPath string, pid int) bool { return isPIDReused(logPath, pid) }
|
||||
|
||||
func ParsePIDFromLog(path string) (int, bool) { return parsePIDFromLog(path) }
|
||||
|
||||
func LogConcurrencyPlanning(limit, total int) { logConcurrencyPlanning(limit, total) }
|
||||
|
||||
func LogConcurrencyState(event, taskID string, active, limit int) {
|
||||
logConcurrencyState(event, taskID, active, limit)
|
||||
}
|
||||
|
||||
func SanitizeLogSuffix(raw string) string { return sanitizeLogSuffix(raw) }
|
||||
@@ -1,4 +1,4 @@
|
||||
package main
|
||||
package logger
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
@@ -28,7 +28,7 @@ func TestLoggerConcurrencyLogHelpers(t *testing.T) {
|
||||
t.Fatalf("NewLoggerWithSuffix error: %v", err)
|
||||
}
|
||||
setLogger(logger)
|
||||
defer closeLogger()
|
||||
defer func() { _ = closeLogger() }()
|
||||
|
||||
logConcurrencyPlanning(0, 2)
|
||||
logConcurrencyPlanning(3, 2)
|
||||
@@ -64,8 +64,8 @@ func TestLoggerConcurrencyLogHelpersNoopWithoutActiveLogger(t *testing.T) {
|
||||
func TestLoggerCleanupOldLogsSkipsUnsafeAndHandlesAlreadyDeleted(t *testing.T) {
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
|
||||
unsafePath := createTempLog(t, tempDir, fmt.Sprintf("%s-%d.log", primaryLogPrefix(), 222))
|
||||
orphanPath := createTempLog(t, tempDir, fmt.Sprintf("%s-%d.log", primaryLogPrefix(), 111))
|
||||
unsafePath := createTempLog(t, tempDir, fmt.Sprintf("%s-%d.log", PrimaryLogPrefix(), 222))
|
||||
orphanPath := createTempLog(t, tempDir, fmt.Sprintf("%s-%d.log", PrimaryLogPrefix(), 111))
|
||||
|
||||
stubFileStat(t, func(path string) (os.FileInfo, error) {
|
||||
if path == unsafePath {
|
||||
@@ -1,4 +1,4 @@
|
||||
package main
|
||||
package logger
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
@@ -26,12 +26,12 @@ func TestLoggerWithSuffixNamingAndIsolation(t *testing.T) {
|
||||
}
|
||||
defer loggerB.Close()
|
||||
|
||||
wantA := filepath.Join(tempDir, fmt.Sprintf("%s-%d-%s.log", primaryLogPrefix(), os.Getpid(), taskA))
|
||||
wantA := filepath.Join(tempDir, fmt.Sprintf("%s-%d-%s.log", PrimaryLogPrefix(), os.Getpid(), taskA))
|
||||
if loggerA.Path() != wantA {
|
||||
t.Fatalf("loggerA path = %q, want %q", loggerA.Path(), wantA)
|
||||
}
|
||||
|
||||
wantB := filepath.Join(tempDir, fmt.Sprintf("%s-%d-%s.log", primaryLogPrefix(), os.Getpid(), taskB))
|
||||
wantB := filepath.Join(tempDir, fmt.Sprintf("%s-%d-%s.log", PrimaryLogPrefix(), os.Getpid(), taskB))
|
||||
if loggerB.Path() != wantB {
|
||||
t.Fatalf("loggerB path = %q, want %q", loggerB.Path(), wantB)
|
||||
}
|
||||
@@ -105,7 +105,7 @@ func TestLoggerWithSuffixSanitizesUnsafeSuffix(t *testing.T) {
|
||||
_ = os.Remove(logger.Path())
|
||||
})
|
||||
|
||||
wantBase := fmt.Sprintf("%s-%d-%s.log", primaryLogPrefix(), os.Getpid(), safe)
|
||||
wantBase := fmt.Sprintf("%s-%d-%s.log", PrimaryLogPrefix(), os.Getpid(), safe)
|
||||
if gotBase := filepath.Base(logger.Path()); gotBase != wantBase {
|
||||
t.Fatalf("log filename = %q, want %q", gotBase, wantBase)
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
package main
|
||||
package logger
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
"fmt"
|
||||
"math"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
@@ -77,30 +76,6 @@ func TestLoggerWritesLevels(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoggerDefaultIsTerminalCoverage(t *testing.T) {
|
||||
oldStdin := os.Stdin
|
||||
t.Cleanup(func() { os.Stdin = oldStdin })
|
||||
|
||||
f, err := os.CreateTemp(t.TempDir(), "stdin-*")
|
||||
if err != nil {
|
||||
t.Fatalf("os.CreateTemp() error = %v", err)
|
||||
}
|
||||
defer os.Remove(f.Name())
|
||||
|
||||
os.Stdin = f
|
||||
if got := defaultIsTerminal(); got {
|
||||
t.Fatalf("defaultIsTerminal() = %v, want false for regular file", got)
|
||||
}
|
||||
|
||||
if err := f.Close(); err != nil {
|
||||
t.Fatalf("Close() error = %v", err)
|
||||
}
|
||||
os.Stdin = f
|
||||
if got := defaultIsTerminal(); !got {
|
||||
t.Fatalf("defaultIsTerminal() = %v, want true when Stat fails", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoggerCloseStopsWorkerAndKeepsFile(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
@@ -118,11 +93,6 @@ func TestLoggerCloseStopsWorkerAndKeepsFile(t *testing.T) {
|
||||
if err := logger.Close(); err != nil {
|
||||
t.Fatalf("Close() returned error: %v", err)
|
||||
}
|
||||
if logger.file != nil {
|
||||
if _, err := logger.file.Write([]byte("x")); err == nil {
|
||||
t.Fatalf("expected file to be closed after Close()")
|
||||
}
|
||||
}
|
||||
|
||||
// After recent changes, log file is kept for debugging - NOT removed
|
||||
if _, err := os.Stat(logPath); os.IsNotExist(err) {
|
||||
@@ -131,18 +101,6 @@ func TestLoggerCloseStopsWorkerAndKeepsFile(t *testing.T) {
|
||||
|
||||
// Clean up manually for test
|
||||
defer os.Remove(logPath)
|
||||
|
||||
done := make(chan struct{})
|
||||
go func() {
|
||||
logger.workerWG.Wait()
|
||||
close(done)
|
||||
}()
|
||||
|
||||
select {
|
||||
case <-done:
|
||||
case <-time.After(200 * time.Millisecond):
|
||||
t.Fatalf("worker goroutine did not exit after Close")
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoggerConcurrentWritesSafe(t *testing.T) {
|
||||
@@ -194,50 +152,13 @@ func TestLoggerConcurrentWritesSafe(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoggerTerminateProcessActive(t *testing.T) {
|
||||
cmd := exec.Command("sleep", "5")
|
||||
if err := cmd.Start(); err != nil {
|
||||
t.Skipf("cannot start sleep command: %v", err)
|
||||
}
|
||||
|
||||
timer := terminateProcess(&realCmd{cmd: cmd})
|
||||
if timer == nil {
|
||||
t.Fatalf("terminateProcess returned nil timer for active process")
|
||||
}
|
||||
defer timer.Stop()
|
||||
|
||||
done := make(chan error, 1)
|
||||
go func() {
|
||||
done <- cmd.Wait()
|
||||
}()
|
||||
|
||||
select {
|
||||
case <-time.After(500 * time.Millisecond):
|
||||
t.Fatalf("process not terminated promptly")
|
||||
case <-done:
|
||||
}
|
||||
|
||||
// Force the timer callback to run immediately to cover the kill branch.
|
||||
timer.Reset(0)
|
||||
time.Sleep(10 * time.Millisecond)
|
||||
}
|
||||
|
||||
func TestLoggerTerminateProcessNil(t *testing.T) {
|
||||
if timer := terminateProcess(nil); timer != nil {
|
||||
t.Fatalf("terminateProcess(nil) should return nil timer")
|
||||
}
|
||||
if timer := terminateProcess(&realCmd{cmd: &exec.Cmd{}}); timer != nil {
|
||||
t.Fatalf("terminateProcess with nil process should return nil timer")
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoggerCleanupOldLogsRemovesOrphans(t *testing.T) {
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
|
||||
orphan1 := createTempLog(t, tempDir, "codex-wrapper-111.log")
|
||||
orphan2 := createTempLog(t, tempDir, "codex-wrapper-222-suffix.log")
|
||||
running1 := createTempLog(t, tempDir, "codex-wrapper-333.log")
|
||||
running2 := createTempLog(t, tempDir, "codex-wrapper-444-extra-info.log")
|
||||
orphan1 := createTempLog(t, tempDir, "codeagent-wrapper-111.log")
|
||||
orphan2 := createTempLog(t, tempDir, "codeagent-wrapper-222-suffix.log")
|
||||
running1 := createTempLog(t, tempDir, "codeagent-wrapper-333.log")
|
||||
running2 := createTempLog(t, tempDir, "codeagent-wrapper-444-extra-info.log")
|
||||
untouched := createTempLog(t, tempDir, "unrelated.log")
|
||||
|
||||
runningPIDs := map[int]bool{333: true, 444: true}
|
||||
@@ -285,15 +206,15 @@ func TestLoggerCleanupOldLogsHandlesInvalidNamesAndErrors(t *testing.T) {
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
|
||||
invalid := []string{
|
||||
"codex-wrapper-.log",
|
||||
"codex-wrapper.log",
|
||||
"codex-wrapper-foo-bar.txt",
|
||||
"codeagent-wrapper-.log",
|
||||
"codeagent-wrapper.log",
|
||||
"codeagent-wrapper-foo-bar.txt",
|
||||
"not-a-codex.log",
|
||||
}
|
||||
for _, name := range invalid {
|
||||
createTempLog(t, tempDir, name)
|
||||
}
|
||||
target := createTempLog(t, tempDir, "codex-wrapper-555-extra.log")
|
||||
target := createTempLog(t, tempDir, "codeagent-wrapper-555-extra.log")
|
||||
|
||||
var checked []int
|
||||
stubProcessRunning(t, func(pid int) bool {
|
||||
@@ -389,8 +310,8 @@ func TestLoggerCleanupOldLogsHandlesTempDirPermissionErrors(t *testing.T) {
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
|
||||
paths := []string{
|
||||
createTempLog(t, tempDir, "codex-wrapper-6100.log"),
|
||||
createTempLog(t, tempDir, "codex-wrapper-6101.log"),
|
||||
createTempLog(t, tempDir, "codeagent-wrapper-6100.log"),
|
||||
createTempLog(t, tempDir, "codeagent-wrapper-6101.log"),
|
||||
}
|
||||
|
||||
stubProcessRunning(t, func(int) bool { return false })
|
||||
@@ -428,8 +349,8 @@ func TestLoggerCleanupOldLogsHandlesTempDirPermissionErrors(t *testing.T) {
|
||||
func TestLoggerCleanupOldLogsHandlesPermissionDeniedFile(t *testing.T) {
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
|
||||
protected := createTempLog(t, tempDir, "codex-wrapper-6200.log")
|
||||
deletable := createTempLog(t, tempDir, "codex-wrapper-6201.log")
|
||||
protected := createTempLog(t, tempDir, "codeagent-wrapper-6200.log")
|
||||
deletable := createTempLog(t, tempDir, "codeagent-wrapper-6201.log")
|
||||
|
||||
stubProcessRunning(t, func(int) bool { return false })
|
||||
stubProcessStartTime(t, func(int) time.Time { return time.Time{} })
|
||||
@@ -468,7 +389,7 @@ func TestLoggerCleanupOldLogsPerformanceBound(t *testing.T) {
|
||||
const fileCount = 400
|
||||
fakePaths := make([]string, fileCount)
|
||||
for i := 0; i < fileCount; i++ {
|
||||
name := fmt.Sprintf("codex-wrapper-%d.log", 10000+i)
|
||||
name := fmt.Sprintf("codeagent-wrapper-%d.log", 10000+i)
|
||||
fakePaths[i] = createTempLog(t, tempDir, name)
|
||||
}
|
||||
|
||||
@@ -505,102 +426,11 @@ func TestLoggerCleanupOldLogsPerformanceBound(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoggerCleanupOldLogsCoverageSuite(t *testing.T) {
|
||||
TestBackendParseJSONStream_CoverageSuite(t)
|
||||
}
|
||||
|
||||
// Reuse the existing coverage suite so the focused TestLogger run still exercises
|
||||
// the rest of the codebase and keeps coverage high.
|
||||
func TestLoggerCoverageSuite(t *testing.T) {
|
||||
suite := []struct {
|
||||
name string
|
||||
fn func(*testing.T)
|
||||
}{
|
||||
{"TestBackendParseJSONStream_CoverageSuite", TestBackendParseJSONStream_CoverageSuite},
|
||||
{"TestVersionCoverageFullRun", TestVersionCoverageFullRun},
|
||||
{"TestVersionMainWrapper", TestVersionMainWrapper},
|
||||
|
||||
{"TestExecutorHelperCoverage", TestExecutorHelperCoverage},
|
||||
{"TestExecutorRunCodexTaskWithContext", TestExecutorRunCodexTaskWithContext},
|
||||
{"TestExecutorParallelLogIsolation", TestExecutorParallelLogIsolation},
|
||||
{"TestExecutorTaskLoggerContext", TestExecutorTaskLoggerContext},
|
||||
{"TestExecutorExecuteConcurrentWithContextBranches", TestExecutorExecuteConcurrentWithContextBranches},
|
||||
{"TestExecutorSignalAndTermination", TestExecutorSignalAndTermination},
|
||||
{"TestExecutorCancelReasonAndCloseWithReason", TestExecutorCancelReasonAndCloseWithReason},
|
||||
{"TestExecutorForceKillTimerStop", TestExecutorForceKillTimerStop},
|
||||
{"TestExecutorForwardSignalsDefaults", TestExecutorForwardSignalsDefaults},
|
||||
|
||||
{"TestBackendParseArgs_NewMode", TestBackendParseArgs_NewMode},
|
||||
{"TestBackendParseArgs_ResumeMode", TestBackendParseArgs_ResumeMode},
|
||||
{"TestBackendParseArgs_BackendFlag", TestBackendParseArgs_BackendFlag},
|
||||
{"TestBackendParseArgs_SkipPermissions", TestBackendParseArgs_SkipPermissions},
|
||||
{"TestBackendParseBoolFlag", TestBackendParseBoolFlag},
|
||||
{"TestBackendEnvFlagEnabled", TestBackendEnvFlagEnabled},
|
||||
{"TestRunResolveTimeout", TestRunResolveTimeout},
|
||||
{"TestRunIsTerminal", TestRunIsTerminal},
|
||||
{"TestRunReadPipedTask", TestRunReadPipedTask},
|
||||
{"TestTailBufferWrite", TestTailBufferWrite},
|
||||
{"TestLogWriterWriteLimitsBuffer", TestLogWriterWriteLimitsBuffer},
|
||||
{"TestLogWriterLogLine", TestLogWriterLogLine},
|
||||
{"TestNewLogWriterDefaultMaxLen", TestNewLogWriterDefaultMaxLen},
|
||||
{"TestNewLogWriterDefaultLimit", TestNewLogWriterDefaultLimit},
|
||||
{"TestRunHello", TestRunHello},
|
||||
{"TestRunGreet", TestRunGreet},
|
||||
{"TestRunFarewell", TestRunFarewell},
|
||||
{"TestRunFarewellEmpty", TestRunFarewellEmpty},
|
||||
|
||||
{"TestParallelParseConfig_Success", TestParallelParseConfig_Success},
|
||||
{"TestParallelParseConfig_Backend", TestParallelParseConfig_Backend},
|
||||
{"TestParallelParseConfig_InvalidFormat", TestParallelParseConfig_InvalidFormat},
|
||||
{"TestParallelParseConfig_EmptyTasks", TestParallelParseConfig_EmptyTasks},
|
||||
{"TestParallelParseConfig_MissingID", TestParallelParseConfig_MissingID},
|
||||
{"TestParallelParseConfig_MissingTask", TestParallelParseConfig_MissingTask},
|
||||
{"TestParallelParseConfig_DuplicateID", TestParallelParseConfig_DuplicateID},
|
||||
{"TestParallelParseConfig_DelimiterFormat", TestParallelParseConfig_DelimiterFormat},
|
||||
|
||||
{"TestBackendSelectBackend", TestBackendSelectBackend},
|
||||
{"TestBackendSelectBackend_Invalid", TestBackendSelectBackend_Invalid},
|
||||
{"TestBackendSelectBackend_DefaultOnEmpty", TestBackendSelectBackend_DefaultOnEmpty},
|
||||
{"TestBackendBuildArgs_CodexBackend", TestBackendBuildArgs_CodexBackend},
|
||||
{"TestBackendBuildArgs_ClaudeBackend", TestBackendBuildArgs_ClaudeBackend},
|
||||
{"TestClaudeBackendBuildArgs_OutputValidation", TestClaudeBackendBuildArgs_OutputValidation},
|
||||
{"TestBackendBuildArgs_GeminiBackend", TestBackendBuildArgs_GeminiBackend},
|
||||
{"TestGeminiBackendBuildArgs_OutputValidation", TestGeminiBackendBuildArgs_OutputValidation},
|
||||
{"TestBackendNamesAndCommands", TestBackendNamesAndCommands},
|
||||
|
||||
{"TestBackendParseJSONStream", TestBackendParseJSONStream},
|
||||
{"TestBackendParseJSONStream_ClaudeEvents", TestBackendParseJSONStream_ClaudeEvents},
|
||||
{"TestBackendParseJSONStream_GeminiEvents", TestBackendParseJSONStream_GeminiEvents},
|
||||
{"TestBackendParseJSONStreamWithWarn_InvalidLine", TestBackendParseJSONStreamWithWarn_InvalidLine},
|
||||
{"TestBackendParseJSONStream_OnMessage", TestBackendParseJSONStream_OnMessage},
|
||||
{"TestBackendParseJSONStream_ScannerError", TestBackendParseJSONStream_ScannerError},
|
||||
{"TestBackendDiscardInvalidJSON", TestBackendDiscardInvalidJSON},
|
||||
{"TestBackendDiscardInvalidJSONBuffer", TestBackendDiscardInvalidJSONBuffer},
|
||||
|
||||
{"TestCurrentWrapperNameFallsBackToExecutable", TestCurrentWrapperNameFallsBackToExecutable},
|
||||
{"TestCurrentWrapperNameDetectsLegacyAliasSymlink", TestCurrentWrapperNameDetectsLegacyAliasSymlink},
|
||||
|
||||
{"TestIsProcessRunning", TestIsProcessRunning},
|
||||
{"TestGetProcessStartTimeReadsProcStat", TestGetProcessStartTimeReadsProcStat},
|
||||
{"TestGetProcessStartTimeInvalidData", TestGetProcessStartTimeInvalidData},
|
||||
{"TestGetBootTimeParsesBtime", TestGetBootTimeParsesBtime},
|
||||
{"TestGetBootTimeInvalidData", TestGetBootTimeInvalidData},
|
||||
|
||||
{"TestClaudeBuildArgs_ModesAndPermissions", TestClaudeBuildArgs_ModesAndPermissions},
|
||||
{"TestClaudeBuildArgs_GeminiAndCodexModes", TestClaudeBuildArgs_GeminiAndCodexModes},
|
||||
{"TestClaudeBuildArgs_BackendMetadata", TestClaudeBuildArgs_BackendMetadata},
|
||||
}
|
||||
|
||||
for _, tc := range suite {
|
||||
t.Run(tc.name, tc.fn)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoggerCleanupOldLogsKeepsCurrentProcessLog(t *testing.T) {
|
||||
tempDir := setTempDirEnv(t, t.TempDir())
|
||||
|
||||
currentPID := os.Getpid()
|
||||
currentLog := createTempLog(t, tempDir, fmt.Sprintf("codex-wrapper-%d.log", currentPID))
|
||||
currentLog := createTempLog(t, tempDir, fmt.Sprintf("codeagent-wrapper-%d.log", currentPID))
|
||||
|
||||
stubProcessRunning(t, func(pid int) bool {
|
||||
if pid != currentPID {
|
||||
@@ -676,7 +506,7 @@ func TestLoggerIsUnsafeFileSecurityChecks(t *testing.T) {
|
||||
stubEvalSymlinks(t, func(path string) (string, error) {
|
||||
return filepath.Join(absTempDir, filepath.Base(path)), nil
|
||||
})
|
||||
unsafe, reason := isUnsafeFile(filepath.Join(absTempDir, "codex-wrapper-1.log"), tempDir)
|
||||
unsafe, reason := isUnsafeFile(filepath.Join(absTempDir, "codeagent-wrapper-1.log"), tempDir)
|
||||
if !unsafe || reason != "refusing to delete symlink" {
|
||||
t.Fatalf("expected symlink to be rejected, got unsafe=%v reason=%q", unsafe, reason)
|
||||
}
|
||||
@@ -702,9 +532,9 @@ func TestLoggerIsUnsafeFileSecurityChecks(t *testing.T) {
|
||||
})
|
||||
otherDir := t.TempDir()
|
||||
stubEvalSymlinks(t, func(string) (string, error) {
|
||||
return filepath.Join(otherDir, "codex-wrapper-9.log"), nil
|
||||
return filepath.Join(otherDir, "codeagent-wrapper-9.log"), nil
|
||||
})
|
||||
unsafe, reason := isUnsafeFile(filepath.Join(otherDir, "codex-wrapper-9.log"), tempDir)
|
||||
unsafe, reason := isUnsafeFile(filepath.Join(otherDir, "codeagent-wrapper-9.log"), tempDir)
|
||||
if !unsafe || reason != "file is outside tempDir" {
|
||||
t.Fatalf("expected outside file to be rejected, got unsafe=%v reason=%q", unsafe, reason)
|
||||
}
|
||||
@@ -713,15 +543,21 @@ func TestLoggerIsUnsafeFileSecurityChecks(t *testing.T) {
|
||||
|
||||
func TestLoggerPathAndRemove(t *testing.T) {
|
||||
tempDir := t.TempDir()
|
||||
path := filepath.Join(tempDir, "sample.log")
|
||||
if err := os.WriteFile(path, []byte("test"), 0o644); err != nil {
|
||||
t.Fatalf("failed to create temp file: %v", err)
|
||||
t.Setenv("TMPDIR", tempDir)
|
||||
|
||||
logger, err := NewLoggerWithSuffix("sample")
|
||||
if err != nil {
|
||||
t.Fatalf("NewLoggerWithSuffix() error = %v", err)
|
||||
}
|
||||
path := logger.Path()
|
||||
if path == "" {
|
||||
_ = logger.Close()
|
||||
t.Fatalf("logger.Path() returned empty path")
|
||||
}
|
||||
if err := logger.Close(); err != nil {
|
||||
t.Fatalf("Close() error = %v", err)
|
||||
}
|
||||
|
||||
logger := &Logger{path: path}
|
||||
if got := logger.Path(); got != path {
|
||||
t.Fatalf("Path() = %q, want %q", got, path)
|
||||
}
|
||||
if err := logger.RemoveLogFile(); err != nil {
|
||||
t.Fatalf("RemoveLogFile() error = %v", err)
|
||||
}
|
||||
@@ -738,43 +574,6 @@ func TestLoggerPathAndRemove(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoggerTruncateBytesCoverage(t *testing.T) {
|
||||
if got := truncateBytes([]byte("abc"), 3); got != "abc" {
|
||||
t.Fatalf("truncateBytes() = %q, want %q", got, "abc")
|
||||
}
|
||||
if got := truncateBytes([]byte("abcd"), 3); got != "abc..." {
|
||||
t.Fatalf("truncateBytes() = %q, want %q", got, "abc...")
|
||||
}
|
||||
if got := truncateBytes([]byte("abcd"), -1); got != "" {
|
||||
t.Fatalf("truncateBytes() = %q, want empty string", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoggerInternalLog(t *testing.T) {
|
||||
logger := &Logger{
|
||||
ch: make(chan logEntry, 1),
|
||||
done: make(chan struct{}),
|
||||
pendingWG: sync.WaitGroup{},
|
||||
}
|
||||
|
||||
done := make(chan logEntry, 1)
|
||||
go func() {
|
||||
entry := <-logger.ch
|
||||
logger.pendingWG.Done()
|
||||
done <- entry
|
||||
}()
|
||||
|
||||
logger.log("INFO", "hello")
|
||||
entry := <-done
|
||||
if entry.msg != "hello" {
|
||||
t.Fatalf("unexpected entry %+v", entry)
|
||||
}
|
||||
|
||||
logger.closed.Store(true)
|
||||
logger.log("INFO", "ignored")
|
||||
close(logger.done)
|
||||
}
|
||||
|
||||
func TestLoggerParsePIDFromLog(t *testing.T) {
|
||||
hugePID := strconv.FormatInt(math.MaxInt64, 10) + "0"
|
||||
tests := []struct {
|
||||
@@ -782,13 +581,13 @@ func TestLoggerParsePIDFromLog(t *testing.T) {
|
||||
pid int
|
||||
ok bool
|
||||
}{
|
||||
{"codex-wrapper-123.log", 123, true},
|
||||
{"codex-wrapper-999-extra.log", 999, true},
|
||||
{"codex-wrapper-.log", 0, false},
|
||||
{"codeagent-wrapper-123.log", 123, true},
|
||||
{"codeagent-wrapper-999-extra.log", 999, true},
|
||||
{"codeagent-wrapper-.log", 0, false},
|
||||
{"invalid-name.log", 0, false},
|
||||
{"codex-wrapper--5.log", 0, false},
|
||||
{"codex-wrapper-0.log", 0, false},
|
||||
{fmt.Sprintf("codex-wrapper-%s.log", hugePID), 0, false},
|
||||
{"codeagent-wrapper--5.log", 0, false},
|
||||
{"codeagent-wrapper-0.log", 0, false},
|
||||
{fmt.Sprintf("codeagent-wrapper-%s.log", hugePID), 0, false},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
@@ -827,56 +626,32 @@ func setTempDirEnv(t *testing.T, dir string) string {
|
||||
|
||||
func stubProcessRunning(t *testing.T, fn func(int) bool) {
|
||||
t.Helper()
|
||||
original := processRunningCheck
|
||||
processRunningCheck = fn
|
||||
t.Cleanup(func() {
|
||||
processRunningCheck = original
|
||||
})
|
||||
t.Cleanup(SetProcessRunningCheck(fn))
|
||||
}
|
||||
|
||||
func stubProcessStartTime(t *testing.T, fn func(int) time.Time) {
|
||||
t.Helper()
|
||||
original := processStartTimeFn
|
||||
processStartTimeFn = fn
|
||||
t.Cleanup(func() {
|
||||
processStartTimeFn = original
|
||||
})
|
||||
t.Cleanup(SetProcessStartTimeFn(fn))
|
||||
}
|
||||
|
||||
func stubRemoveLogFile(t *testing.T, fn func(string) error) {
|
||||
t.Helper()
|
||||
original := removeLogFileFn
|
||||
removeLogFileFn = fn
|
||||
t.Cleanup(func() {
|
||||
removeLogFileFn = original
|
||||
})
|
||||
t.Cleanup(SetRemoveLogFileFn(fn))
|
||||
}
|
||||
|
||||
func stubGlobLogFiles(t *testing.T, fn func(string) ([]string, error)) {
|
||||
t.Helper()
|
||||
original := globLogFiles
|
||||
globLogFiles = fn
|
||||
t.Cleanup(func() {
|
||||
globLogFiles = original
|
||||
})
|
||||
t.Cleanup(SetGlobLogFilesFn(fn))
|
||||
}
|
||||
|
||||
func stubFileStat(t *testing.T, fn func(string) (os.FileInfo, error)) {
|
||||
t.Helper()
|
||||
original := fileStatFn
|
||||
fileStatFn = fn
|
||||
t.Cleanup(func() {
|
||||
fileStatFn = original
|
||||
})
|
||||
t.Cleanup(SetFileStatFn(fn))
|
||||
}
|
||||
|
||||
func stubEvalSymlinks(t *testing.T, fn func(string) (string, error)) {
|
||||
t.Helper()
|
||||
original := evalSymlinksFn
|
||||
evalSymlinksFn = fn
|
||||
t.Cleanup(func() {
|
||||
evalSymlinksFn = original
|
||||
})
|
||||
t.Cleanup(SetEvalSymlinksFn(fn))
|
||||
}
|
||||
|
||||
type fakeFileInfo struct {
|
||||
@@ -960,7 +735,7 @@ func TestLoggerExtractRecentErrors(t *testing.T) {
|
||||
t.Fatalf("NewLoggerWithSuffix() error = %v", err)
|
||||
}
|
||||
defer logger.Close()
|
||||
defer logger.RemoveLogFile()
|
||||
defer func() { _ = logger.RemoveLogFile() }()
|
||||
|
||||
// Write logs using logger methods
|
||||
for _, entry := range tt.logs {
|
||||
@@ -1000,14 +775,14 @@ func TestLoggerExtractRecentErrorsNilLogger(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestLoggerExtractRecentErrorsEmptyPath(t *testing.T) {
|
||||
logger := &Logger{path: ""}
|
||||
logger := &Logger{}
|
||||
if got := logger.ExtractRecentErrors(10); got != nil {
|
||||
t.Fatalf("empty path ExtractRecentErrors() should return nil, got %v", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestLoggerExtractRecentErrorsFileNotExist(t *testing.T) {
|
||||
logger := &Logger{path: "/nonexistent/path/to/log.log"}
|
||||
logger := &Logger{}
|
||||
if got := logger.ExtractRecentErrors(10); got != nil {
|
||||
t.Fatalf("nonexistent file ExtractRecentErrors() should return nil, got %v", got)
|
||||
}
|
||||
@@ -1049,7 +824,7 @@ func TestExtractRecentErrorsBoundaryCheck(t *testing.T) {
|
||||
t.Fatalf("NewLoggerWithSuffix() error = %v", err)
|
||||
}
|
||||
defer logger.Close()
|
||||
defer logger.RemoveLogFile()
|
||||
defer func() { _ = logger.RemoveLogFile() }()
|
||||
|
||||
// Write some errors
|
||||
logger.Error("error 1")
|
||||
@@ -1082,7 +857,7 @@ func TestErrorEntriesMaxLimit(t *testing.T) {
|
||||
t.Fatalf("NewLoggerWithSuffix() error = %v", err)
|
||||
}
|
||||
defer logger.Close()
|
||||
defer logger.RemoveLogFile()
|
||||
defer func() { _ = logger.RemoveLogFile() }()
|
||||
|
||||
// Write 150 error/warn entries
|
||||
for i := 1; i <= 150; i++ {
|
||||
63 codeagent-wrapper/internal/logger/process_check.go Normal file
@@ -0,0 +1,63 @@
package logger

import (
    "errors"
    "math"
    "time"

    "github.com/shirou/gopsutil/v3/process"
)

func pidToInt32(pid int) (int32, bool) {
    if pid <= 0 || pid > math.MaxInt32 {
        return 0, false
    }
    return int32(pid), true
}

// isProcessRunning reports whether a process with the given pid appears to be running.
// It is intentionally conservative on errors to avoid deleting logs for live processes.
func isProcessRunning(pid int) bool {
    pid32, ok := pidToInt32(pid)
    if !ok {
        return false
    }

    exists, err := process.PidExists(pid32)
    if err == nil {
        return exists
    }

    // If we can positively identify that the process doesn't exist, report false.
    if errors.Is(err, process.ErrorProcessNotRunning) {
        return false
    }

    // Permission/inspection failures: assume it's running to be safe.
    return true
}

// getProcessStartTime returns the start time of a process.
// Returns zero time if the start time cannot be determined.
func getProcessStartTime(pid int) time.Time {
    pid32, ok := pidToInt32(pid)
    if !ok {
        return time.Time{}
    }

    proc, err := process.NewProcess(pid32)
    if err != nil {
        return time.Time{}
    }

    ms, err := proc.CreateTime()
    if err != nil || ms <= 0 {
        return time.Time{}
    }

    return time.UnixMilli(ms)
}

func IsProcessRunning(pid int) bool { return isProcessRunning(pid) }

func GetProcessStartTime(pid int) time.Time { return getProcessStartTime(pid) }
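Illustration (not part of the diff): the start-time helper exists so that cleanup code can distinguish a dead owner from a reused PID. The helper below and its name are hypothetical, shown only as a sketch of how the two functions might be combined.

package logger

import "time"

// canReclaimLog is a hypothetical helper (illustration only, not part of the diff).
// A log file is reclaimable when its owning PID is gone, or when the PID has been
// reused by a newer process that started after the log file was created.
func canReclaimLog(pid int, logCreated time.Time) bool {
    if !isProcessRunning(pid) {
        return true
    }
    start := getProcessStartTime(pid)
    return !start.IsZero() && start.After(logCreated)
}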
112 codeagent-wrapper/internal/logger/process_check_test.go Normal file
@@ -0,0 +1,112 @@
|
||||
package logger
|
||||
|
||||
import (
|
||||
"math"
|
||||
"os"
|
||||
"os/exec"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestIsProcessRunning(t *testing.T) {
|
||||
t.Run("boundary values", func(t *testing.T) {
|
||||
if isProcessRunning(0) {
|
||||
t.Fatalf("pid 0 should never be treated as running")
|
||||
}
|
||||
if isProcessRunning(-1) {
|
||||
t.Fatalf("negative pid should never be treated as running")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("pid out of int32 range", func(t *testing.T) {
|
||||
if strconv.IntSize <= 32 {
|
||||
t.Skip("int cannot represent values above int32 range")
|
||||
}
|
||||
|
||||
pid := int(int64(math.MaxInt32) + 1)
|
||||
if isProcessRunning(pid) {
|
||||
t.Fatalf("expected pid %d (out of int32 range) to be treated as not running", pid)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("current process", func(t *testing.T) {
|
||||
if !isProcessRunning(os.Getpid()) {
|
||||
t.Fatalf("expected current process (pid=%d) to be running", os.Getpid())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("fake pid", func(t *testing.T) {
|
||||
const nonexistentPID = 1 << 30
|
||||
if isProcessRunning(nonexistentPID) {
|
||||
t.Fatalf("expected pid %d to be reported as not running", nonexistentPID)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("terminated process", func(t *testing.T) {
|
||||
pid := exitedProcessPID(t)
|
||||
if isProcessRunning(pid) {
|
||||
t.Fatalf("expected exited child process (pid=%d) to be reported as not running", pid)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func exitedProcessPID(t *testing.T) int {
|
||||
t.Helper()
|
||||
|
||||
var cmd *exec.Cmd
|
||||
if runtime.GOOS == "windows" {
|
||||
cmd = exec.Command("cmd", "/c", "exit 0")
|
||||
} else {
|
||||
cmd = exec.Command("sh", "-c", "exit 0")
|
||||
}
|
||||
|
||||
if err := cmd.Start(); err != nil {
|
||||
t.Fatalf("failed to start helper process: %v", err)
|
||||
}
|
||||
pid := cmd.Process.Pid
|
||||
|
||||
if err := cmd.Wait(); err != nil {
|
||||
t.Fatalf("helper process did not exit cleanly: %v", err)
|
||||
}
|
||||
|
||||
time.Sleep(50 * time.Millisecond)
|
||||
return pid
|
||||
}
|
||||
|
||||
func TestGetProcessStartTimeReadsProcStat(t *testing.T) {
|
||||
start := getProcessStartTime(os.Getpid())
|
||||
if start.IsZero() {
|
||||
t.Fatalf("expected non-zero start time for current process")
|
||||
}
|
||||
if start.After(time.Now().Add(5 * time.Second)) {
|
||||
t.Fatalf("start time is unexpectedly in the future: %v", start)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetProcessStartTimeInvalidData(t *testing.T) {
|
||||
if !getProcessStartTime(0).IsZero() {
|
||||
t.Fatalf("expected zero time for pid 0")
|
||||
}
|
||||
if !getProcessStartTime(-1).IsZero() {
|
||||
t.Fatalf("expected zero time for negative pid")
|
||||
}
|
||||
if !getProcessStartTime(1 << 30).IsZero() {
|
||||
t.Fatalf("expected zero time for non-existent pid")
|
||||
}
|
||||
if strconv.IntSize > 32 {
|
||||
pid := int(int64(math.MaxInt32) + 1)
|
||||
if !getProcessStartTime(pid).IsZero() {
|
||||
t.Fatalf("expected zero time for pid %d (out of int32 range)", pid)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetBootTimeParsesBtime(t *testing.T) {
|
||||
t.Skip("legacy boot-time probing removed; start time now uses gopsutil")
|
||||
}
|
||||
|
||||
func TestGetBootTimeInvalidData(t *testing.T) {
|
||||
t.Skip("legacy boot-time probing removed; start time now uses gopsutil")
|
||||
}
|
||||
67 codeagent-wrapper/internal/logger/testhooks.go Normal file
@@ -0,0 +1,67 @@
package logger

import (
    "os"
    "path/filepath"
    "time"
)

func SetProcessRunningCheck(fn func(int) bool) (restore func()) {
    prev := processRunningCheck
    if fn != nil {
        processRunningCheck = fn
    } else {
        processRunningCheck = isProcessRunning
    }
    return func() { processRunningCheck = prev }
}

func SetProcessStartTimeFn(fn func(int) time.Time) (restore func()) {
    prev := processStartTimeFn
    if fn != nil {
        processStartTimeFn = fn
    } else {
        processStartTimeFn = getProcessStartTime
    }
    return func() { processStartTimeFn = prev }
}

func SetRemoveLogFileFn(fn func(string) error) (restore func()) {
    prev := removeLogFileFn
    if fn != nil {
        removeLogFileFn = fn
    } else {
        removeLogFileFn = os.Remove
    }
    return func() { removeLogFileFn = prev }
}

func SetGlobLogFilesFn(fn func(string) ([]string, error)) (restore func()) {
    prev := globLogFiles
    if fn != nil {
        globLogFiles = fn
    } else {
        globLogFiles = filepath.Glob
    }
    return func() { globLogFiles = prev }
}

func SetFileStatFn(fn func(string) (os.FileInfo, error)) (restore func()) {
    prev := fileStatFn
    if fn != nil {
        fileStatFn = fn
    } else {
        fileStatFn = os.Lstat
    }
    return func() { fileStatFn = prev }
}

func SetEvalSymlinksFn(fn func(string) (string, error)) (restore func()) {
    prev := evalSymlinksFn
    if fn != nil {
        evalSymlinksFn = fn
    } else {
        evalSymlinksFn = filepath.EvalSymlinks
    }
    return func() { evalSymlinksFn = prev }
}
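Illustration (not part of the diff): each setter returns a restore closure, so a test can register it with t.Cleanup and have the package-level hook reset even when the test fails, which is the pattern the stub helpers in logger_test.go follow. A minimal in-package sketch:

package logger

import (
    "testing"
    "time"
)

// Sketch of the stubbing pattern: register the restore closure with t.Cleanup
// so the package-level hook is reset after the test, pass or fail.
func TestWithStubbedHooks(t *testing.T) {
    t.Cleanup(SetProcessRunningCheck(func(int) bool { return false }))
    t.Cleanup(SetProcessStartTimeFn(func(int) time.Time { return time.Time{} }))

    // Code under test that consults processRunningCheck / processStartTimeFn
    // now sees every PID as not running, with an unknown start time.
}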
13 codeagent-wrapper/internal/logger/wrapper_name.go Normal file
@@ -0,0 +1,13 @@
package logger

// WrapperName is the fixed name for this tool.
const WrapperName = "codeagent-wrapper"

// CurrentWrapperName returns the wrapper name (always "codeagent-wrapper").
func CurrentWrapperName() string { return WrapperName }

// LogPrefixes returns the log file name prefixes to look for.
func LogPrefixes() []string { return []string{WrapperName} }

// PrimaryLogPrefix returns the preferred filename prefix for log files.
func PrimaryLogPrefix() string { return WrapperName }
74 codeagent-wrapper/internal/parser/event.go Normal file
@@ -0,0 +1,74 @@
package parser

import "github.com/goccy/go-json"

// JSONEvent represents a Codex JSON output event.
type JSONEvent struct {
    Type     string     `json:"type"`
    ThreadID string     `json:"thread_id,omitempty"`
    Item     *EventItem `json:"item,omitempty"`
}

// EventItem represents the item field in a JSON event.
type EventItem struct {
    Type string      `json:"type"`
    Text interface{} `json:"text"`
}

// ClaudeEvent for Claude stream-json format.
type ClaudeEvent struct {
    Type      string `json:"type"`
    Subtype   string `json:"subtype,omitempty"`
    SessionID string `json:"session_id,omitempty"`
    Result    string `json:"result,omitempty"`
}

// GeminiEvent for Gemini stream-json format.
type GeminiEvent struct {
    Type      string `json:"type"`
    SessionID string `json:"session_id,omitempty"`
    Role      string `json:"role,omitempty"`
    Content   string `json:"content,omitempty"`
    Delta     bool   `json:"delta,omitempty"`
    Status    string `json:"status,omitempty"`
}

// UnifiedEvent combines all backend event formats into a single structure
// to avoid multiple JSON unmarshal operations per event.
type UnifiedEvent struct {
    // Common fields
    Type string `json:"type"`

    // Codex-specific fields
    ThreadID string          `json:"thread_id,omitempty"`
    Item     json.RawMessage `json:"item,omitempty"` // Lazy parse

    // Claude-specific fields
    Subtype   string `json:"subtype,omitempty"`
    SessionID string `json:"session_id,omitempty"`
    Result    string `json:"result,omitempty"`

    // Gemini-specific fields
    Role    string `json:"role,omitempty"`
    Content string `json:"content,omitempty"`
    Delta   *bool  `json:"delta,omitempty"`
    Status  string `json:"status,omitempty"`

    // Opencode-specific fields (camelCase sessionID)
    OpencodeSessionID string          `json:"sessionID,omitempty"`
    Part              json.RawMessage `json:"part,omitempty"`
}

// OpencodePart represents the part field in opencode events.
type OpencodePart struct {
    Type      string `json:"type"`
    Text      string `json:"text,omitempty"`
    Reason    string `json:"reason,omitempty"`
    SessionID string `json:"sessionID,omitempty"`
}

// ItemContent represents the parsed item.text field for Codex events.
type ItemContent struct {
    Type string      `json:"type"`
    Text interface{} `json:"text"`
}
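Illustration (not part of the diff): the Part field stays a json.RawMessage so that one Unmarshal covers every backend and the opencode payload is only decoded when needed. The sample line below mirrors the format used in the opencode parser tests further down; the example function itself is an editorial sketch.

package parser

import (
    "fmt"

    "github.com/goccy/go-json"
)

// ExampleUnifiedEvent_opencode decodes one opencode-style stream line:
// the outer event in a single pass, then the part lazily.
func ExampleUnifiedEvent_opencode() {
    line := []byte(`{"type":"text","sessionID":"ses_123","part":{"type":"text","text":"hi"}}`)

    var ev UnifiedEvent
    _ = json.Unmarshal(line, &ev) // error handling omitted in this sketch

    var part OpencodePart
    _ = json.Unmarshal(ev.Part, &part) // Part is json.RawMessage, parsed lazily

    fmt.Println(ev.Type, ev.OpencodeSessionID, part.Text)
    // Output: text ses_123 hi
}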
@@ -1,102 +1,65 @@
|
||||
package main
|
||||
package parser
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/goccy/go-json"
|
||||
)
|
||||
|
||||
// JSONEvent represents a Codex JSON output event
|
||||
type JSONEvent struct {
|
||||
Type string `json:"type"`
|
||||
ThreadID string `json:"thread_id,omitempty"`
|
||||
Item *EventItem `json:"item,omitempty"`
|
||||
}
|
||||
|
||||
// EventItem represents the item field in a JSON event
|
||||
type EventItem struct {
|
||||
Type string `json:"type"`
|
||||
Text interface{} `json:"text"`
|
||||
}
|
||||
|
||||
// ClaudeEvent for Claude stream-json format
|
||||
type ClaudeEvent struct {
|
||||
Type string `json:"type"`
|
||||
Subtype string `json:"subtype,omitempty"`
|
||||
SessionID string `json:"session_id,omitempty"`
|
||||
Result string `json:"result,omitempty"`
|
||||
}
|
||||
|
||||
// GeminiEvent for Gemini stream-json format
|
||||
type GeminiEvent struct {
|
||||
Type string `json:"type"`
|
||||
SessionID string `json:"session_id,omitempty"`
|
||||
Role string `json:"role,omitempty"`
|
||||
Content string `json:"content,omitempty"`
|
||||
Delta bool `json:"delta,omitempty"`
|
||||
Status string `json:"status,omitempty"`
|
||||
}
|
||||
|
||||
func parseJSONStream(r io.Reader) (message, threadID string) {
|
||||
return parseJSONStreamWithLog(r, logWarn, logInfo)
|
||||
}
|
||||
|
||||
func parseJSONStreamWithWarn(r io.Reader, warnFn func(string)) (message, threadID string) {
|
||||
return parseJSONStreamWithLog(r, warnFn, logInfo)
|
||||
}
|
||||
|
||||
func parseJSONStreamWithLog(r io.Reader, warnFn func(string), infoFn func(string)) (message, threadID string) {
|
||||
return parseJSONStreamInternal(r, warnFn, infoFn, nil, nil)
|
||||
}
|
||||
|
||||
const (
|
||||
jsonLineReaderSize = 64 * 1024
|
||||
jsonLineMaxBytes = 10 * 1024 * 1024
|
||||
jsonLinePreviewBytes = 256
|
||||
)
|
||||
|
||||
type codexHeader struct {
|
||||
Type string `json:"type"`
|
||||
ThreadID string `json:"thread_id,omitempty"`
|
||||
Item *struct {
|
||||
Type string `json:"type"`
|
||||
} `json:"item,omitempty"`
|
||||
type lineScratch struct {
|
||||
buf []byte
|
||||
preview []byte
|
||||
}
|
||||
|
||||
// UnifiedEvent combines all backend event formats into a single structure
|
||||
// to avoid multiple JSON unmarshal operations per event
|
||||
type UnifiedEvent struct {
|
||||
// Common fields
|
||||
Type string `json:"type"`
|
||||
const maxPooledLineScratchCap = 1 << 20 // 1 MiB
|
||||
|
||||
// Codex-specific fields
|
||||
ThreadID string `json:"thread_id,omitempty"`
|
||||
Item json.RawMessage `json:"item,omitempty"` // Lazy parse
|
||||
|
||||
// Claude-specific fields
|
||||
Subtype string `json:"subtype,omitempty"`
|
||||
SessionID string `json:"session_id,omitempty"`
|
||||
Result string `json:"result,omitempty"`
|
||||
|
||||
// Gemini-specific fields
|
||||
Role string `json:"role,omitempty"`
|
||||
Content string `json:"content,omitempty"`
|
||||
Delta *bool `json:"delta,omitempty"`
|
||||
Status string `json:"status,omitempty"`
|
||||
var lineScratchPool = sync.Pool{
|
||||
New: func() any {
|
||||
return &lineScratch{
|
||||
buf: make([]byte, 0, jsonLineReaderSize),
|
||||
preview: make([]byte, 0, jsonLinePreviewBytes),
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
// ItemContent represents the parsed item.text field for Codex events
|
||||
type ItemContent struct {
|
||||
Type string `json:"type"`
|
||||
Text interface{} `json:"text"`
|
||||
}
|
||||
|
||||
func parseJSONStreamInternal(r io.Reader, warnFn func(string), infoFn func(string), onMessage func(), onComplete func()) (message, threadID string) {
|
||||
func ParseJSONStreamInternal(r io.Reader, warnFn func(string), infoFn func(string), onMessage func(), onComplete func()) (message, threadID string) {
|
||||
reader := bufio.NewReaderSize(r, jsonLineReaderSize)
|
||||
scratch := lineScratchPool.Get().(*lineScratch)
|
||||
if scratch.buf == nil {
|
||||
scratch.buf = make([]byte, 0, jsonLineReaderSize)
|
||||
} else {
|
||||
scratch.buf = scratch.buf[:0]
|
||||
}
|
||||
if scratch.preview == nil {
|
||||
scratch.preview = make([]byte, 0, jsonLinePreviewBytes)
|
||||
} else {
|
||||
scratch.preview = scratch.preview[:0]
|
||||
}
|
||||
defer func() {
|
||||
if cap(scratch.buf) > maxPooledLineScratchCap {
|
||||
scratch.buf = nil
|
||||
} else if scratch.buf != nil {
|
||||
scratch.buf = scratch.buf[:0]
|
||||
}
|
||||
if cap(scratch.preview) > jsonLinePreviewBytes*4 {
|
||||
scratch.preview = nil
|
||||
} else if scratch.preview != nil {
|
||||
scratch.preview = scratch.preview[:0]
|
||||
}
|
||||
lineScratchPool.Put(scratch)
|
||||
}()
|
||||
|
||||
if warnFn == nil {
|
||||
warnFn = func(string) {}
|
||||
@@ -120,13 +83,14 @@ func parseJSONStreamInternal(r io.Reader, warnFn func(string), infoFn func(strin
|
||||
totalEvents := 0
|
||||
|
||||
var (
|
||||
codexMessage string
|
||||
claudeMessage string
|
||||
geminiBuffer strings.Builder
|
||||
codexMessage string
|
||||
claudeMessage string
|
||||
geminiBuffer strings.Builder
|
||||
opencodeMessage strings.Builder
|
||||
)
|
||||
|
||||
for {
|
||||
line, tooLong, err := readLineWithLimit(reader, jsonLineMaxBytes, jsonLinePreviewBytes)
|
||||
line, tooLong, err := readLineWithLimit(reader, jsonLineMaxBytes, jsonLinePreviewBytes, scratch)
|
||||
if err != nil {
|
||||
if errors.Is(err, io.EOF) {
|
||||
break
|
||||
@@ -142,14 +106,14 @@ func parseJSONStreamInternal(r io.Reader, warnFn func(string), infoFn func(strin
|
||||
totalEvents++
|
||||
|
||||
if tooLong {
|
||||
warnFn(fmt.Sprintf("Skipped overlong JSON line (> %d bytes): %s", jsonLineMaxBytes, truncateBytes(line, 100)))
|
||||
warnFn(fmt.Sprintf("Skipped overlong JSON line (> %d bytes): %s", jsonLineMaxBytes, TruncateBytes(line, 100)))
|
||||
continue
|
||||
}
|
||||
|
||||
// Single unmarshal for all backend types
|
||||
var event UnifiedEvent
|
||||
if err := json.Unmarshal(line, &event); err != nil {
|
||||
warnFn(fmt.Sprintf("Failed to parse event: %s", truncateBytes(line, 100)))
|
||||
warnFn(fmt.Sprintf("Failed to parse event: %s", TruncateBytes(line, 100)))
|
||||
continue
|
||||
}
|
||||
|
||||
@@ -163,11 +127,46 @@ func parseJSONStreamInternal(r io.Reader, warnFn func(string), infoFn func(strin
|
||||
isCodex = true
|
||||
}
|
||||
}
|
||||
// Codex-specific event types without thread_id or item
|
||||
if !isCodex && (event.Type == "turn.started" || event.Type == "turn.completed") {
|
||||
isCodex = true
|
||||
}
|
||||
isClaude := event.Subtype != "" || event.Result != ""
|
||||
if !isClaude && event.Type == "result" && event.SessionID != "" && event.Status == "" {
|
||||
isClaude = true
|
||||
}
|
||||
isGemini := event.Role != "" || event.Delta != nil || event.Status != ""
|
||||
isGemini := (event.Type == "init" && event.SessionID != "") || event.Role != "" || event.Delta != nil || event.Status != ""
|
||||
isOpencode := event.OpencodeSessionID != "" && len(event.Part) > 0
|
||||
|
||||
// Handle Opencode events first (most specific detection)
|
||||
if isOpencode {
|
||||
if threadID == "" {
|
||||
threadID = event.OpencodeSessionID
|
||||
}
|
||||
|
||||
var part OpencodePart
|
||||
if err := json.Unmarshal(event.Part, &part); err != nil {
|
||||
warnFn(fmt.Sprintf("Failed to parse opencode part: %s", err.Error()))
|
||||
continue
|
||||
}
|
||||
|
||||
// Extract sessionID from part if available
|
||||
if part.SessionID != "" && threadID == "" {
|
||||
threadID = part.SessionID
|
||||
}
|
||||
|
||||
infoFn(fmt.Sprintf("Parsed Opencode event #%d type=%s part_type=%s", totalEvents, event.Type, part.Type))
|
||||
|
||||
if event.Type == "text" && part.Text != "" {
|
||||
opencodeMessage.WriteString(part.Text)
|
||||
notifyMessage()
|
||||
}
|
||||
|
||||
if part.Type == "step-finish" && part.Reason == "stop" {
|
||||
notifyComplete()
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
// Handle Codex events
|
||||
if isCodex {
|
||||
@@ -194,6 +193,10 @@ func parseJSONStreamInternal(r io.Reader, warnFn func(string), infoFn func(strin
|
||||
infoFn(fmt.Sprintf("thread.completed event thread_id=%s", event.ThreadID))
|
||||
notifyComplete()
|
||||
|
||||
case "turn.completed":
|
||||
infoFn("turn.completed event")
|
||||
notifyComplete()
|
||||
|
||||
case "item.completed":
|
||||
var itemType string
|
||||
if len(event.Item) > 0 {
|
||||
@@ -209,7 +212,7 @@ func parseJSONStreamInternal(r io.Reader, warnFn func(string), infoFn func(strin
|
||||
// Lazy parse: only parse item content when needed
|
||||
var item ItemContent
|
||||
if err := json.Unmarshal(event.Item, &item); err == nil {
|
||||
normalized := normalizeText(item.Text)
|
||||
normalized := NormalizeText(item.Text)
|
||||
infoFn(fmt.Sprintf("item.completed event item_type=%s message_len=%d", itemType, len(normalized)))
|
||||
if normalized != "" {
|
||||
codexMessage = normalized
|
||||
@@ -271,11 +274,13 @@ func parseJSONStreamInternal(r io.Reader, warnFn func(string), infoFn func(strin
|
||||
continue
|
||||
}
|
||||
|
||||
// Unknown event format
|
||||
warnFn(fmt.Sprintf("Unknown event format: %s", truncateBytes(line, 100)))
|
||||
// Unknown event format from other backends (turn.started/assistant/user); ignore.
|
||||
continue
|
||||
}
|
||||
|
||||
switch {
|
||||
case opencodeMessage.Len() > 0:
|
||||
message = opencodeMessage.String()
|
||||
case geminiBuffer.Len() > 0:
|
||||
message = geminiBuffer.String()
|
||||
case claudeMessage != "":
|
||||
@@ -288,12 +293,12 @@ func parseJSONStreamInternal(r io.Reader, warnFn func(string), infoFn func(strin
|
||||
return message, threadID
|
||||
}
|
||||
|
||||
func hasKey(m map[string]json.RawMessage, key string) bool {
|
||||
func HasKey(m map[string]json.RawMessage, key string) bool {
|
||||
_, ok := m[key]
|
||||
return ok
|
||||
}
|
||||
|
||||
func discardInvalidJSON(decoder *json.Decoder, reader *bufio.Reader) (*bufio.Reader, error) {
|
||||
func DiscardInvalidJSON(decoder *json.Decoder, reader *bufio.Reader) (*bufio.Reader, error) {
|
||||
var buffered bytes.Buffer
|
||||
|
||||
if decoder != nil {
|
||||
@@ -319,7 +324,7 @@ func discardInvalidJSON(decoder *json.Decoder, reader *bufio.Reader) (*bufio.Rea
|
||||
return bufio.NewReader(io.MultiReader(bytes.NewReader(remaining), reader)), err
|
||||
}
|
||||
|
||||
func readLineWithLimit(r *bufio.Reader, maxBytes int, previewBytes int) (line []byte, tooLong bool, err error) {
|
||||
func readLineWithLimit(r *bufio.Reader, maxBytes int, previewBytes int, scratch *lineScratch) (line []byte, tooLong bool, err error) {
|
||||
if r == nil {
|
||||
return nil, false, errors.New("reader is nil")
|
||||
}
|
||||
@@ -342,12 +347,22 @@ func readLineWithLimit(r *bufio.Reader, maxBytes int, previewBytes int) (line []
|
||||
return part, false, nil
|
||||
}
|
||||
|
||||
preview := make([]byte, 0, min(previewBytes, len(part)))
|
||||
if scratch == nil {
|
||||
scratch = &lineScratch{}
|
||||
}
|
||||
if scratch.preview == nil {
|
||||
scratch.preview = make([]byte, 0, min(previewBytes, len(part)))
|
||||
}
|
||||
if scratch.buf == nil {
|
||||
scratch.buf = make([]byte, 0, min(maxBytes, len(part)*2))
|
||||
}
|
||||
|
||||
preview := scratch.preview[:0]
|
||||
if previewBytes > 0 {
|
||||
preview = append(preview, part[:min(previewBytes, len(part))]...)
|
||||
}
|
||||
|
||||
buf := make([]byte, 0, min(maxBytes, len(part)*2))
|
||||
buf := scratch.buf[:0]
|
||||
total := 0
|
||||
if len(part) > maxBytes {
|
||||
tooLong = true
|
||||
@@ -377,12 +392,16 @@ func readLineWithLimit(r *bufio.Reader, maxBytes int, previewBytes int) (line []
|
||||
}
|
||||
|
||||
if tooLong {
|
||||
scratch.preview = preview
|
||||
scratch.buf = buf
|
||||
return preview, true, nil
|
||||
}
|
||||
scratch.preview = preview
|
||||
scratch.buf = buf
|
||||
return buf, false, nil
|
||||
}
|
||||
|
||||
func truncateBytes(b []byte, maxLen int) string {
|
||||
func TruncateBytes(b []byte, maxLen int) string {
|
||||
if len(b) <= maxLen {
|
||||
return string(b)
|
||||
}
|
||||
@@ -392,7 +411,7 @@ func truncateBytes(b []byte, maxLen int) string {
|
||||
return string(b[:maxLen]) + "..."
|
||||
}
|
||||
|
||||
func normalizeText(text interface{}) string {
|
||||
func NormalizeText(text interface{}) string {
|
||||
switch v := text.(type) {
|
||||
case string:
|
||||
return v
|
||||
@@ -408,3 +427,10 @@ func normalizeText(text interface{}) string {
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
func min(a, b int) int {
|
||||
if a < b {
|
||||
return a
|
||||
}
|
||||
return b
|
||||
}
|
||||
50 codeagent-wrapper/internal/parser/parser_opencode_test.go Normal file
@@ -0,0 +1,50 @@
package parser

import (
    "strings"
    "testing"
)

func TestParseJSONStream_Opencode(t *testing.T) {
    input := `{"type":"step_start","timestamp":1768187730683,"sessionID":"ses_44fced3c7ffe83sZpzY1rlQka3","part":{"id":"prt_bb0339afa001NTqoJ2NS8x91zP","sessionID":"ses_44fced3c7ffe83sZpzY1rlQka3","messageID":"msg_bb033866f0011oZxTqvfy0TKtS","type":"step-start","snapshot":"904f0fd58c125b79e60f0993e38f9d9f6200bf47"}}
{"type":"text","timestamp":1768187744432,"sessionID":"ses_44fced3c7ffe83sZpzY1rlQka3","part":{"id":"prt_bb0339cb5001QDd0Lh0PzFZpa3","sessionID":"ses_44fced3c7ffe83sZpzY1rlQka3","messageID":"msg_bb033866f0011oZxTqvfy0TKtS","type":"text","text":"Hello from opencode"}}
{"type":"step_finish","timestamp":1768187744471,"sessionID":"ses_44fced3c7ffe83sZpzY1rlQka3","part":{"id":"prt_bb033d0af0019VRZzpO2OVW1na","sessionID":"ses_44fced3c7ffe83sZpzY1rlQka3","messageID":"msg_bb033866f0011oZxTqvfy0TKtS","type":"step-finish","reason":"stop","snapshot":"904f0fd58c125b79e60f0993e38f9d9f6200bf47","cost":0}}`

    message, threadID := ParseJSONStreamInternal(strings.NewReader(input), nil, nil, nil, nil)

    if threadID != "ses_44fced3c7ffe83sZpzY1rlQka3" {
        t.Errorf("threadID = %q, want %q", threadID, "ses_44fced3c7ffe83sZpzY1rlQka3")
    }
    if message != "Hello from opencode" {
        t.Errorf("message = %q, want %q", message, "Hello from opencode")
    }
}

func TestParseJSONStream_Opencode_MultipleTextEvents(t *testing.T) {
    input := `{"type":"text","sessionID":"ses_123","part":{"type":"text","text":"Part 1"}}
{"type":"text","sessionID":"ses_123","part":{"type":"text","text":" Part 2"}}
{"type":"step_finish","sessionID":"ses_123","part":{"type":"step-finish","reason":"stop"}}`

    message, threadID := ParseJSONStreamInternal(strings.NewReader(input), nil, nil, nil, nil)

    if threadID != "ses_123" {
        t.Errorf("threadID = %q, want %q", threadID, "ses_123")
    }
    if message != "Part 1 Part 2" {
        t.Errorf("message = %q, want %q", message, "Part 1 Part 2")
    }
}

func TestParseJSONStream_Opencode_NoStopReason(t *testing.T) {
    input := `{"type":"text","sessionID":"ses_456","part":{"type":"text","text":"Content"}}
{"type":"step_finish","sessionID":"ses_456","part":{"type":"step-finish","reason":"tool-calls"}}`

    message, threadID := ParseJSONStreamInternal(strings.NewReader(input), nil, nil, nil, nil)

    if threadID != "ses_456" {
        t.Errorf("threadID = %q, want %q", threadID, "ses_456")
    }
    if message != "Content" {
        t.Errorf("message = %q, want %q", message, "Content")
    }
}
@@ -1,4 +1,4 @@
package main
package parser

import (
    "strings"
@@ -18,7 +18,7 @@ func TestParseJSONStream_SkipsOverlongLineAndContinues(t *testing.T) {
    var warns []string
    warnFn := func(msg string) { warns = append(warns, msg) }

    gotMessage, gotThreadID := parseJSONStreamInternal(strings.NewReader(input), warnFn, nil, nil, nil)
    gotMessage, gotThreadID := ParseJSONStreamInternal(strings.NewReader(input), warnFn, nil, nil, nil)
    if gotMessage != "ok" {
        t.Fatalf("message=%q, want %q (warns=%v)", gotMessage, "ok", warns)
    }
@@ -0,0 +1,32 @@
package parser

import (
    "strings"
    "testing"
)

func TestBackendParseJSONStream_UnknownEventsAreSilent(t *testing.T) {
    input := strings.Join([]string{
        `{"type":"turn.started"}`,
        `{"type":"assistant","text":"hi"}`,
        `{"type":"user","text":"yo"}`,
        `{"type":"item.completed","item":{"type":"agent_message","text":"ok"}}`,
    }, "\n")

    var infos []string
    infoFn := func(msg string) { infos = append(infos, msg) }

    message, threadID := ParseJSONStreamInternal(strings.NewReader(input), nil, infoFn, nil, nil)
    if message != "ok" {
        t.Fatalf("message=%q, want %q (infos=%v)", message, "ok", infos)
    }
    if threadID != "" {
        t.Fatalf("threadID=%q, want empty (infos=%v)", threadID, infos)
    }

    for _, msg := range infos {
        if strings.Contains(msg, "Agent event:") {
            t.Fatalf("unexpected log for unknown event: %q", msg)
        }
    }
}
15 codeagent-wrapper/internal/parser/truncate_bytes_test.go Normal file
@@ -0,0 +1,15 @@
package parser

import "testing"

func TestTruncateBytes(t *testing.T) {
    if got := TruncateBytes([]byte("abc"), 3); got != "abc" {
        t.Fatalf("TruncateBytes() = %q, want %q", got, "abc")
    }
    if got := TruncateBytes([]byte("abcd"), 3); got != "abc..." {
        t.Fatalf("TruncateBytes() = %q, want %q", got, "abc...")
    }
    if got := TruncateBytes([]byte("abcd"), -1); got != "" {
        t.Fatalf("TruncateBytes() = %q, want empty string", got)
    }
}
8 codeagent-wrapper/internal/utils/math.go Normal file
@@ -0,0 +1,8 @@
package utils

func Min(a, b int) int {
    if a < b {
        return a
    }
    return b
}
36 codeagent-wrapper/internal/utils/math_test.go Normal file
@@ -0,0 +1,36 @@
package utils

import "testing"

func TestMin(t *testing.T) {
    tests := []struct {
        name string
        a, b int
        want int
    }{
        {"a less than b", 1, 2, 1},
        {"b less than a", 5, 3, 3},
        {"equal values", 7, 7, 7},
        {"negative a", -5, 3, -5},
        {"negative b", 5, -3, -3},
        {"both negative", -5, -3, -5},
        {"zero and positive", 0, 5, 0},
        {"zero and negative", 0, -5, -5},
        {"large values", 1000000, 999999, 999999},
    }

    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            got := Min(tt.a, tt.b)
            if got != tt.want {
                t.Errorf("Min(%d, %d) = %d, want %d", tt.a, tt.b, got, tt.want)
            }
        })
    }
}

func BenchmarkMin(b *testing.B) {
    for i := 0; i < b.N; i++ {
        Min(i, i+1)
    }
}
62 codeagent-wrapper/internal/utils/strings.go Normal file
@@ -0,0 +1,62 @@
package utils

import "strings"

func Truncate(s string, maxLen int) string {
    if len(s) <= maxLen {
        return s
    }
    if maxLen < 0 {
        return ""
    }
    return s[:maxLen] + "..."
}

// SafeTruncate safely truncates string to maxLen, avoiding panic and UTF-8 corruption.
func SafeTruncate(s string, maxLen int) string {
    if maxLen <= 0 || s == "" {
        return ""
    }

    runes := []rune(s)
    if len(runes) <= maxLen {
        return s
    }

    if maxLen < 4 {
        return string(runes[:1])
    }

    cutoff := maxLen - 3
    if cutoff <= 0 {
        return string(runes[:1])
    }
    if len(runes) <= cutoff {
        return s
    }
    return string(runes[:cutoff]) + "..."
}

// SanitizeOutput removes ANSI escape sequences and control characters.
func SanitizeOutput(s string) string {
    var result strings.Builder
    inEscape := false
    for i := 0; i < len(s); i++ {
        if s[i] == '\x1b' && i+1 < len(s) && s[i+1] == '[' {
            inEscape = true
            i++ // skip '['
            continue
        }
        if inEscape {
            if (s[i] >= 'A' && s[i] <= 'Z') || (s[i] >= 'a' && s[i] <= 'z') {
                inEscape = false
            }
            continue
        }
        // Keep printable chars and common whitespace.
        if s[i] >= 32 || s[i] == '\n' || s[i] == '\t' {
            result.WriteByte(s[i])
        }
    }
    return result.String()
}
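Illustration (not part of the diff): the two helpers compose naturally when preparing backend output for display, sanitize first, then truncate rune-safely. The example values below are made up for the sketch.

package utils

import "fmt"

func ExampleSanitizeOutput() {
    raw := "\x1b[32mok\x1b[0m: build finished"
    clean := SanitizeOutput(raw)     // strip ANSI escapes and control bytes
    short := SafeTruncate(clean, 10) // rune-aware truncation with "..." suffix
    fmt.Println(short)
    // Output: ok: bui...
}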
122 codeagent-wrapper/internal/utils/strings_test.go Normal file
@@ -0,0 +1,122 @@
|
||||
package utils
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestTruncate(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
s string
|
||||
maxLen int
|
||||
want string
|
||||
}{
|
||||
{"empty string", "", 10, ""},
|
||||
{"short string", "hello", 10, "hello"},
|
||||
{"exact length", "hello", 5, "hello"},
|
||||
{"needs truncation", "hello world", 5, "hello..."},
|
||||
{"zero maxLen", "hello", 0, "..."},
|
||||
{"negative maxLen", "hello", -1, ""},
|
||||
{"maxLen 1", "hello", 1, "h..."},
|
||||
{"unicode bytes truncate", "你好世界", 10, "你好世\xe7..."}, // Truncate works on bytes, not runes
|
||||
{"mixed truncate", "hello世界abc", 7, "hello\xe4\xb8..."}, // byte-based truncation
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got := Truncate(tt.s, tt.maxLen)
|
||||
if got != tt.want {
|
||||
t.Errorf("Truncate(%q, %d) = %q, want %q", tt.s, tt.maxLen, got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSafeTruncate(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
s string
|
||||
maxLen int
|
||||
want string
|
||||
}{
|
||||
{"empty string", "", 10, ""},
|
||||
{"zero maxLen", "hello", 0, ""},
|
||||
{"negative maxLen", "hello", -1, ""},
|
||||
{"short string", "hello", 10, "hello"},
|
||||
{"exact length", "hello", 5, "hello"},
|
||||
{"needs truncation", "hello world", 8, "hello..."},
|
||||
{"maxLen 1", "hello", 1, "h"},
|
||||
{"maxLen 2", "hello", 2, "h"},
|
||||
{"maxLen 3", "hello", 3, "h"},
|
||||
{"maxLen 4", "hello", 4, "h..."},
|
||||
{"unicode preserved", "你好世界", 10, "你好世界"},
|
||||
{"unicode exact", "你好世界", 4, "你好世界"},
|
||||
{"unicode truncate", "你好世界test", 6, "你好世..."},
|
||||
{"mixed unicode", "ab你好cd", 5, "ab..."},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got := SafeTruncate(tt.s, tt.maxLen)
|
||||
if got != tt.want {
|
||||
t.Errorf("SafeTruncate(%q, %d) = %q, want %q", tt.s, tt.maxLen, got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestSanitizeOutput(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
s string
|
||||
want string
|
||||
}{
|
||||
{"empty string", "", ""},
|
||||
{"plain text", "hello world", "hello world"},
|
||||
{"with newline", "hello\nworld", "hello\nworld"},
|
||||
{"with tab", "hello\tworld", "hello\tworld"},
|
||||
{"ANSI color red", "\x1b[31mred\x1b[0m", "red"},
|
||||
{"ANSI bold", "\x1b[1mbold\x1b[0m", "bold"},
|
||||
{"ANSI complex", "\x1b[1;31;40mtext\x1b[0m", "text"},
|
||||
{"control chars", "hello\x00\x01\x02world", "helloworld"},
|
||||
{"mixed ANSI and control", "\x1b[32m\x00ok\x1b[0m", "ok"},
|
||||
{"multiple ANSI sequences", "\x1b[31mred\x1b[0m \x1b[32mgreen\x1b[0m", "red green"},
|
||||
{"incomplete escape", "\x1b[", ""},
|
||||
{"escape without bracket", "\x1bA", "A"},
|
||||
{"cursor movement", "\x1b[2Aup\x1b[2Bdown", "updown"},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got := SanitizeOutput(tt.s)
|
||||
if got != tt.want {
|
||||
t.Errorf("SanitizeOutput(%q) = %q, want %q", tt.s, got, tt.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkTruncate(b *testing.B) {
|
||||
s := strings.Repeat("hello world ", 100)
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
Truncate(s, 50)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkSafeTruncate(b *testing.B) {
|
||||
s := strings.Repeat("你好世界", 100)
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
SafeTruncate(s, 50)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkSanitizeOutput(b *testing.B) {
|
||||
s := strings.Repeat("\x1b[31mred\x1b[0m text ", 50)
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
SanitizeOutput(s)
|
||||
}
|
||||
}
|
||||
@@ -1,505 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"os/exec"
|
||||
"os/signal"
|
||||
"reflect"
|
||||
"strings"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
)
|
||||
|
||||
const (
|
||||
version = "5.4.0"
|
||||
defaultWorkdir = "."
|
||||
defaultTimeout = 7200 // seconds (2 hours)
|
||||
defaultCoverageTarget = 90.0
|
||||
codexLogLineLimit = 1000
|
||||
stdinSpecialChars = "\n\\\"'`$"
|
||||
stderrCaptureLimit = 4 * 1024
|
||||
defaultBackendName = "codex"
|
||||
defaultCodexCommand = "codex"
|
||||
|
||||
// stdout close reasons
|
||||
stdoutCloseReasonWait = "wait-done"
|
||||
stdoutCloseReasonDrain = "drain-timeout"
|
||||
stdoutCloseReasonCtx = "context-cancel"
|
||||
stdoutDrainTimeout = 100 * time.Millisecond
|
||||
)
|
||||
|
||||
var useASCIIMode = os.Getenv("CODEAGENT_ASCII_MODE") == "true"
|
||||
|
||||
// Test hooks for dependency injection
|
||||
var (
|
||||
stdinReader io.Reader = os.Stdin
|
||||
isTerminalFn = defaultIsTerminal
|
||||
codexCommand = defaultCodexCommand
|
||||
cleanupHook func()
|
||||
loggerPtr atomic.Pointer[Logger]
|
||||
|
||||
buildCodexArgsFn = buildCodexArgs
|
||||
selectBackendFn = selectBackend
|
||||
commandContext = exec.CommandContext
|
||||
jsonMarshal = json.Marshal
|
||||
cleanupLogsFn = cleanupOldLogs
|
||||
signalNotifyFn = signal.Notify
|
||||
signalStopFn = signal.Stop
|
||||
terminateCommandFn = terminateCommand
|
||||
defaultBuildArgsFn = buildCodexArgs
|
||||
runTaskFn = runCodexTask
|
||||
exitFn = os.Exit
|
||||
)
|
||||
|
||||
var forceKillDelay atomic.Int32
|
||||
|
||||
func init() {
|
||||
forceKillDelay.Store(5) // seconds - default value
|
||||
}
|
||||
|
||||
func runStartupCleanup() {
|
||||
if cleanupLogsFn == nil {
|
||||
return
|
||||
}
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
logWarn(fmt.Sprintf("cleanupOldLogs panic: %v", r))
|
||||
}
|
||||
}()
|
||||
if _, err := cleanupLogsFn(); err != nil {
|
||||
logWarn(fmt.Sprintf("cleanupOldLogs error: %v", err))
|
||||
}
|
||||
}
|
||||
|
||||
func runCleanupMode() int {
|
||||
if cleanupLogsFn == nil {
|
||||
fmt.Fprintln(os.Stderr, "Cleanup failed: log cleanup function not configured")
|
||||
return 1
|
||||
}
|
||||
|
||||
stats, err := cleanupLogsFn()
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Cleanup failed: %v\n", err)
|
||||
return 1
|
||||
}
|
||||
|
||||
fmt.Println("Cleanup completed")
|
||||
fmt.Printf("Files scanned: %d\n", stats.Scanned)
|
||||
fmt.Printf("Files deleted: %d\n", stats.Deleted)
|
||||
if len(stats.DeletedFiles) > 0 {
|
||||
for _, f := range stats.DeletedFiles {
|
||||
fmt.Printf(" - %s\n", f)
|
||||
}
|
||||
}
|
||||
fmt.Printf("Files kept: %d\n", stats.Kept)
|
||||
if len(stats.KeptFiles) > 0 {
|
||||
for _, f := range stats.KeptFiles {
|
||||
fmt.Printf(" - %s\n", f)
|
||||
}
|
||||
}
|
||||
if stats.Errors > 0 {
|
||||
fmt.Printf("Deletion errors: %d\n", stats.Errors)
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
func main() {
|
||||
exitCode := run()
|
||||
exitFn(exitCode)
|
||||
}
|
||||
|
||||
// run is the main logic, returns exit code for testability
|
||||
func run() (exitCode int) {
|
||||
name := currentWrapperName()
|
||||
// Handle --version and --help first (no logger needed)
|
||||
if len(os.Args) > 1 {
|
||||
switch os.Args[1] {
|
||||
case "--version", "-v":
|
||||
fmt.Printf("%s version %s\n", name, version)
|
||||
return 0
|
||||
case "--help", "-h":
|
||||
printHelp()
|
||||
return 0
|
||||
case "--cleanup":
|
||||
return runCleanupMode()
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize logger for all other commands
|
||||
logger, err := NewLogger()
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ERROR: failed to initialize logger: %v\n", err)
|
||||
return 1
|
||||
}
|
||||
setLogger(logger)
|
||||
|
||||
defer func() {
|
||||
logger := activeLogger()
|
||||
if logger != nil {
|
||||
logger.Flush()
|
||||
}
|
||||
if err := closeLogger(); err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ERROR: failed to close logger: %v\n", err)
|
||||
}
|
||||
// On failure, extract and display recent errors before removing log
|
||||
if logger != nil {
|
||||
if exitCode != 0 {
|
||||
if errors := logger.ExtractRecentErrors(10); len(errors) > 0 {
|
||||
fmt.Fprintln(os.Stderr, "\n=== Recent Errors ===")
|
||||
for _, entry := range errors {
|
||||
fmt.Fprintln(os.Stderr, entry)
|
||||
}
|
||||
fmt.Fprintf(os.Stderr, "Log file: %s (deleted)\n", logger.Path())
|
||||
}
|
||||
}
|
||||
if err := logger.RemoveLogFile(); err != nil && !os.IsNotExist(err) {
|
||||
// Silently ignore removal errors
|
||||
}
|
||||
}
|
||||
}()
|
||||
defer runCleanupHook()
|
||||
|
||||
// Clean up stale logs from previous runs.
|
||||
runStartupCleanup()
|
||||
|
||||
// Handle remaining commands
|
||||
if len(os.Args) > 1 {
|
||||
args := os.Args[1:]
|
||||
parallelIndex := -1
|
||||
for i, arg := range args {
|
||||
if arg == "--parallel" {
|
||||
parallelIndex = i
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if parallelIndex != -1 {
|
||||
backendName := defaultBackendName
|
||||
fullOutput := false
|
||||
var extras []string
|
||||
|
||||
for i := 0; i < len(args); i++ {
|
||||
arg := args[i]
|
||||
switch {
|
||||
case arg == "--parallel":
|
||||
continue
|
||||
case arg == "--full-output":
|
||||
fullOutput = true
|
||||
case arg == "--backend":
|
||||
if i+1 >= len(args) {
|
||||
fmt.Fprintln(os.Stderr, "ERROR: --backend flag requires a value")
|
||||
return 1
|
||||
}
|
||||
backendName = args[i+1]
|
||||
i++
|
||||
case strings.HasPrefix(arg, "--backend="):
|
||||
value := strings.TrimPrefix(arg, "--backend=")
|
||||
if value == "" {
|
||||
fmt.Fprintln(os.Stderr, "ERROR: --backend flag requires a value")
|
||||
return 1
|
||||
}
|
||||
backendName = value
|
||||
default:
|
||||
extras = append(extras, arg)
|
||||
}
|
||||
}
|
||||
|
||||
if len(extras) > 0 {
|
||||
fmt.Fprintln(os.Stderr, "ERROR: --parallel reads its task configuration from stdin; only --backend and --full-output are allowed.")
|
||||
fmt.Fprintln(os.Stderr, "Usage examples:")
|
||||
fmt.Fprintf(os.Stderr, " %s --parallel < tasks.txt\n", name)
|
||||
fmt.Fprintf(os.Stderr, " echo '...' | %s --parallel\n", name)
|
||||
fmt.Fprintf(os.Stderr, " %s --parallel <<'EOF'\n", name)
|
||||
fmt.Fprintf(os.Stderr, " %s --parallel --full-output <<'EOF' # include full task output\n", name)
|
||||
return 1
|
||||
}
|
||||
|
||||
backend, err := selectBackendFn(backendName)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ERROR: %v\n", err)
|
||||
return 1
|
||||
}
|
||||
backendName = backend.Name()
|
||||
|
||||
data, err := io.ReadAll(stdinReader)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ERROR: failed to read stdin: %v\n", err)
|
||||
return 1
|
||||
}
|
||||
|
||||
cfg, err := parseParallelConfig(data)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ERROR: %v\n", err)
|
||||
return 1
|
||||
}
|
||||
|
||||
cfg.GlobalBackend = backendName
|
||||
for i := range cfg.Tasks {
|
||||
if strings.TrimSpace(cfg.Tasks[i].Backend) == "" {
|
||||
cfg.Tasks[i].Backend = backendName
|
||||
}
|
||||
}
|
||||
|
||||
timeoutSec := resolveTimeout()
|
||||
layers, err := topologicalSort(cfg.Tasks)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "ERROR: %v\n", err)
|
||||
return 1
|
||||
}
|
||||
|
||||
results := executeConcurrent(layers, timeoutSec)
|
||||
|
||||
// Extract structured report fields from each result
|
||||
for i := range results {
|
||||
results[i].CoverageTarget = defaultCoverageTarget
|
||||
if results[i].Message == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
lines := strings.Split(results[i].Message, "\n")
|
||||
|
||||
// Coverage extraction
|
||||
results[i].Coverage = extractCoverageFromLines(lines)
|
||||
results[i].CoverageNum = extractCoverageNum(results[i].Coverage)
|
||||
|
||||
// Files changed
|
||||
results[i].FilesChanged = extractFilesChangedFromLines(lines)
|
||||
|
||||
// Test results
|
||||
results[i].TestsPassed, results[i].TestsFailed = extractTestResultsFromLines(lines)
|
||||
|
||||
// Key output summary
|
||||
results[i].KeyOutput = extractKeyOutputFromLines(lines, 150)
|
||||
}
|
||||
|
||||
// Default: summary mode (context-efficient)
|
||||
// --full-output: legacy full output mode
|
||||
fmt.Println(generateFinalOutputWithMode(results, !fullOutput))
|
||||
|
||||
exitCode = 0
|
||||
for _, res := range results {
|
||||
if res.ExitCode != 0 {
|
||||
exitCode = res.ExitCode
|
||||
}
|
||||
}
|
||||
|
||||
return exitCode
|
||||
}
|
||||
}
|
||||
|
||||
logInfo("Script started")
|
||||
|
||||
cfg, err := parseArgs()
|
||||
if err != nil {
|
||||
logError(err.Error())
|
||||
return 1
|
||||
}
|
||||
logInfo(fmt.Sprintf("Parsed args: mode=%s, task_len=%d, backend=%s", cfg.Mode, len(cfg.Task), cfg.Backend))
|
||||
|
||||
backend, err := selectBackendFn(cfg.Backend)
|
||||
if err != nil {
|
||||
logError(err.Error())
|
||||
return 1
|
||||
}
|
||||
cfg.Backend = backend.Name()
|
||||
|
||||
cmdInjected := codexCommand != defaultCodexCommand
|
||||
argsInjected := buildCodexArgsFn != nil && reflect.ValueOf(buildCodexArgsFn).Pointer() != reflect.ValueOf(defaultBuildArgsFn).Pointer()
|
||||
|
||||
// Wire selected backend into runtime hooks for the rest of the execution,
|
||||
// but preserve any injected test hooks for the default backend.
|
||||
if backend.Name() != defaultBackendName || !cmdInjected {
|
||||
codexCommand = backend.Command()
|
||||
}
|
||||
if backend.Name() != defaultBackendName || !argsInjected {
|
||||
buildCodexArgsFn = backend.BuildArgs
|
||||
}
|
||||
logInfo(fmt.Sprintf("Selected backend: %s", backend.Name()))
|
||||
|
||||
timeoutSec := resolveTimeout()
|
||||
logInfo(fmt.Sprintf("Timeout: %ds", timeoutSec))
|
||||
cfg.Timeout = timeoutSec
|
||||
|
||||
var taskText string
|
||||
var piped bool
|
||||
|
||||
if cfg.ExplicitStdin {
|
||||
logInfo("Explicit stdin mode: reading task from stdin")
|
||||
data, err := io.ReadAll(stdinReader)
|
||||
if err != nil {
|
||||
logError("Failed to read stdin: " + err.Error())
|
||||
return 1
|
||||
}
|
||||
taskText = string(data)
|
||||
if taskText == "" {
|
||||
logError("Explicit stdin mode requires task input from stdin")
|
||||
return 1
|
||||
}
|
||||
piped = !isTerminal()
|
||||
} else {
|
||||
pipedTask, err := readPipedTask()
|
||||
if err != nil {
|
||||
logError("Failed to read piped stdin: " + err.Error())
|
||||
return 1
|
||||
}
|
||||
piped = pipedTask != ""
|
||||
if piped {
|
||||
taskText = pipedTask
|
||||
} else {
|
||||
taskText = cfg.Task
|
||||
}
|
||||
}
|
||||
|
||||
useStdin := cfg.ExplicitStdin || shouldUseStdin(taskText, piped)
|
||||
|
||||
targetArg := taskText
|
||||
if useStdin {
|
||||
targetArg = "-"
|
||||
}
|
||||
codexArgs := buildCodexArgsFn(cfg, targetArg)
|
||||
|
||||
// Print startup information to stderr
|
||||
fmt.Fprintf(os.Stderr, "[%s]\n", name)
|
||||
fmt.Fprintf(os.Stderr, " Backend: %s\n", cfg.Backend)
|
||||
fmt.Fprintf(os.Stderr, " Command: %s %s\n", codexCommand, strings.Join(codexArgs, " "))
|
||||
fmt.Fprintf(os.Stderr, " PID: %d\n", os.Getpid())
|
||||
fmt.Fprintf(os.Stderr, " Log: %s\n", logger.Path())
|
||||
|
||||
if useStdin {
|
||||
var reasons []string
|
||||
if piped {
|
||||
reasons = append(reasons, "piped input")
|
||||
}
|
||||
if cfg.ExplicitStdin {
|
||||
reasons = append(reasons, "explicit \"-\"")
|
||||
}
|
||||
if strings.Contains(taskText, "\n") {
|
||||
reasons = append(reasons, "newline")
|
||||
}
|
||||
if strings.Contains(taskText, "\\") {
|
||||
reasons = append(reasons, "backslash")
|
||||
}
|
||||
if strings.Contains(taskText, "\"") {
|
||||
reasons = append(reasons, "double-quote")
|
||||
}
|
||||
if strings.Contains(taskText, "'") {
|
||||
reasons = append(reasons, "single-quote")
|
||||
}
|
||||
if strings.Contains(taskText, "`") {
|
||||
reasons = append(reasons, "backtick")
|
||||
}
|
||||
if strings.Contains(taskText, "$") {
|
||||
reasons = append(reasons, "dollar")
|
||||
}
|
||||
if len(taskText) > 800 {
|
||||
reasons = append(reasons, "length>800")
|
||||
}
|
||||
if len(reasons) > 0 {
|
||||
logWarn(fmt.Sprintf("Using stdin mode for task due to: %s", strings.Join(reasons, ", ")))
|
||||
}
|
||||
}
|
||||
|
||||
logInfo(fmt.Sprintf("%s running...", cfg.Backend))
|
||||
|
||||
taskSpec := TaskSpec{
|
||||
Task: taskText,
|
||||
WorkDir: cfg.WorkDir,
|
||||
Mode: cfg.Mode,
|
||||
SessionID: cfg.SessionID,
|
||||
UseStdin: useStdin,
|
||||
}
|
||||
|
||||
result := runTaskFn(taskSpec, false, cfg.Timeout)
|
||||
|
||||
if result.ExitCode != 0 {
|
||||
return result.ExitCode
|
||||
}
|
||||
|
||||
fmt.Println(result.Message)
|
||||
if result.SessionID != "" {
|
||||
fmt.Printf("\n---\nSESSION_ID: %s\n", result.SessionID)
|
||||
}
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
func setLogger(l *Logger) {
|
||||
loggerPtr.Store(l)
|
||||
}
|
||||
|
||||
func closeLogger() error {
|
||||
logger := loggerPtr.Swap(nil)
|
||||
if logger == nil {
|
||||
return nil
|
||||
}
|
||||
return logger.Close()
|
||||
}
|
||||
|
||||
func activeLogger() *Logger {
|
||||
return loggerPtr.Load()
|
||||
}
|
||||
|
||||
func logInfo(msg string) {
|
||||
if logger := activeLogger(); logger != nil {
|
||||
logger.Info(msg)
|
||||
}
|
||||
}
|
||||
|
||||
func logWarn(msg string) {
|
||||
if logger := activeLogger(); logger != nil {
|
||||
logger.Warn(msg)
|
||||
}
|
||||
}
|
||||
|
||||
func logError(msg string) {
|
||||
if logger := activeLogger(); logger != nil {
|
||||
logger.Error(msg)
|
||||
}
|
||||
}
|
||||
|
||||
func runCleanupHook() {
|
||||
if logger := activeLogger(); logger != nil {
|
||||
logger.Flush()
|
||||
}
|
||||
if cleanupHook != nil {
|
||||
cleanupHook()
|
||||
}
|
||||
}
|
||||
|
||||
func printHelp() {
|
||||
name := currentWrapperName()
|
||||
help := fmt.Sprintf(`%[1]s - Go wrapper for AI CLI backends
|
||||
|
||||
Usage:
|
||||
%[1]s "task" [workdir]
|
||||
%[1]s --backend claude "task" [workdir]
|
||||
%[1]s - [workdir] Read task from stdin
|
||||
%[1]s resume <session_id> "task" [workdir]
|
||||
%[1]s resume <session_id> - [workdir]
|
||||
%[1]s --parallel Run tasks in parallel (config from stdin)
|
||||
%[1]s --parallel --full-output Run tasks in parallel with full output (legacy)
|
||||
%[1]s --version
|
||||
%[1]s --help
|
||||
|
||||
Parallel mode examples:
|
||||
%[1]s --parallel < tasks.txt
|
||||
echo '...' | %[1]s --parallel
|
||||
%[1]s --parallel --full-output < tasks.txt
|
||||
%[1]s --parallel <<'EOF'
|
||||
|
||||
Environment Variables:
|
||||
CODEX_TIMEOUT Timeout in milliseconds (default: 7200000)
|
||||
CODEAGENT_ASCII_MODE Use ASCII symbols instead of Unicode (PASS/WARN/FAIL)
|
||||
|
||||
Exit Codes:
|
||||
0 Success
|
||||
1 General error (missing args, no output)
|
||||
124 Timeout
|
||||
127 backend command not found
|
||||
130 Interrupted (Ctrl+C)
|
||||
* Passthrough from backend process`, name)
|
||||
fmt.Println(help)
|
||||
}
|
||||
@@ -1,217 +0,0 @@
|
||||
//go:build unix || darwin || linux
|
||||
// +build unix darwin linux
|
||||
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func TestIsProcessRunning(t *testing.T) {
|
||||
t.Run("current process", func(t *testing.T) {
|
||||
if !isProcessRunning(os.Getpid()) {
|
||||
t.Fatalf("expected current process (pid=%d) to be running", os.Getpid())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("fake pid", func(t *testing.T) {
|
||||
const nonexistentPID = 1 << 30
|
||||
if isProcessRunning(nonexistentPID) {
|
||||
t.Fatalf("expected pid %d to be reported as not running", nonexistentPID)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("terminated process", func(t *testing.T) {
|
||||
pid := exitedProcessPID(t)
|
||||
if isProcessRunning(pid) {
|
||||
t.Fatalf("expected exited child process (pid=%d) to be reported as not running", pid)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("boundary values", func(t *testing.T) {
|
||||
if isProcessRunning(0) {
|
||||
t.Fatalf("pid 0 should never be treated as running")
|
||||
}
|
||||
if isProcessRunning(-42) {
|
||||
t.Fatalf("negative pid should never be treated as running")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("find process error", func(t *testing.T) {
|
||||
original := findProcess
|
||||
defer func() { findProcess = original }()
|
||||
|
||||
mockErr := errors.New("findProcess failure")
|
||||
findProcess = func(pid int) (*os.Process, error) {
|
||||
return nil, mockErr
|
||||
}
|
||||
|
||||
if isProcessRunning(1234) {
|
||||
t.Fatalf("expected false when os.FindProcess fails")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func exitedProcessPID(t *testing.T) int {
|
||||
t.Helper()
|
||||
|
||||
var cmd *exec.Cmd
|
||||
if runtime.GOOS == "windows" {
|
||||
cmd = exec.Command("cmd", "/c", "exit 0")
|
||||
} else {
|
||||
cmd = exec.Command("sh", "-c", "exit 0")
|
||||
}
|
||||
|
||||
if err := cmd.Start(); err != nil {
|
||||
t.Fatalf("failed to start helper process: %v", err)
|
||||
}
|
||||
pid := cmd.Process.Pid
|
||||
|
||||
if err := cmd.Wait(); err != nil {
|
||||
t.Fatalf("helper process did not exit cleanly: %v", err)
|
||||
}
|
||||
|
||||
time.Sleep(50 * time.Millisecond)
|
||||
return pid
|
||||
}
|
||||
|
||||
func TestRunProcessCheckSmoke(t *testing.T) {
|
||||
t.Run("current process", func(t *testing.T) {
|
||||
if !isProcessRunning(os.Getpid()) {
|
||||
t.Fatalf("expected current process (pid=%d) to be running", os.Getpid())
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("fake pid", func(t *testing.T) {
|
||||
const nonexistentPID = 1 << 30
|
||||
if isProcessRunning(nonexistentPID) {
|
||||
t.Fatalf("expected pid %d to be reported as not running", nonexistentPID)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("boundary values", func(t *testing.T) {
|
||||
if isProcessRunning(0) {
|
||||
t.Fatalf("pid 0 should never be treated as running")
|
||||
}
|
||||
if isProcessRunning(-42) {
|
||||
t.Fatalf("negative pid should never be treated as running")
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("find process error", func(t *testing.T) {
|
||||
original := findProcess
|
||||
defer func() { findProcess = original }()
|
||||
|
||||
mockErr := errors.New("findProcess failure")
|
||||
findProcess = func(pid int) (*os.Process, error) {
|
||||
return nil, mockErr
|
||||
}
|
||||
|
||||
if isProcessRunning(1234) {
|
||||
t.Fatalf("expected false when os.FindProcess fails")
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetProcessStartTimeReadsProcStat(t *testing.T) {
|
||||
pid := 4321
|
||||
boot := time.Unix(1_710_000_000, 0)
|
||||
startTicks := uint64(4500)
|
||||
|
||||
statFields := make([]string, 25)
|
||||
for i := range statFields {
|
||||
statFields[i] = strconv.Itoa(i + 1)
|
||||
}
|
||||
statFields[19] = strconv.FormatUint(startTicks, 10)
|
||||
statContent := fmt.Sprintf("%d (%s) %s", pid, "cmd with space", strings.Join(statFields, " "))
|
||||
|
||||
stubReadFile(t, func(path string) ([]byte, error) {
|
||||
switch path {
|
||||
case fmt.Sprintf("/proc/%d/stat", pid):
|
||||
return []byte(statContent), nil
|
||||
case "/proc/stat":
|
||||
return []byte(fmt.Sprintf("cpu 0 0 0 0\nbtime %d\n", boot.Unix())), nil
|
||||
default:
|
||||
return nil, os.ErrNotExist
|
||||
}
|
||||
})
|
||||
|
||||
got := getProcessStartTime(pid)
|
||||
want := boot.Add(time.Duration(startTicks/100) * time.Second)
|
||||
if !got.Equal(want) {
|
||||
t.Fatalf("getProcessStartTime() = %v, want %v", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetProcessStartTimeInvalidData(t *testing.T) {
|
||||
pid := 99
|
||||
stubReadFile(t, func(path string) ([]byte, error) {
|
||||
switch path {
|
||||
case fmt.Sprintf("/proc/%d/stat", pid):
|
||||
return []byte("garbage"), nil
|
||||
case "/proc/stat":
|
||||
return []byte("btime not-a-number\n"), nil
|
||||
default:
|
||||
return nil, os.ErrNotExist
|
||||
}
|
||||
})
|
||||
|
||||
if got := getProcessStartTime(pid); !got.IsZero() {
|
||||
t.Fatalf("invalid /proc data should return zero time, got %v", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetBootTimeParsesBtime(t *testing.T) {
|
||||
const bootSec = 1_711_111_111
|
||||
stubReadFile(t, func(path string) ([]byte, error) {
|
||||
if path != "/proc/stat" {
|
||||
return nil, os.ErrNotExist
|
||||
}
|
||||
content := fmt.Sprintf("intr 0\nbtime %d\n", bootSec)
|
||||
return []byte(content), nil
|
||||
})
|
||||
|
||||
got := getBootTime()
|
||||
want := time.Unix(bootSec, 0)
|
||||
if !got.Equal(want) {
|
||||
t.Fatalf("getBootTime() = %v, want %v", got, want)
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetBootTimeInvalidData(t *testing.T) {
|
||||
cases := []struct {
|
||||
name string
|
||||
content string
|
||||
}{
|
||||
{"missing", "cpu 0 0 0 0"},
|
||||
{"malformed", "btime abc"},
|
||||
}
|
||||
|
||||
for _, tt := range cases {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
stubReadFile(t, func(string) ([]byte, error) {
|
||||
return []byte(tt.content), nil
|
||||
})
|
||||
if got := getBootTime(); !got.IsZero() {
|
||||
t.Fatalf("getBootTime() unexpected value for %s: %v", tt.name, got)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func stubReadFile(t *testing.T, fn func(string) ([]byte, error)) {
|
||||
t.Helper()
|
||||
original := readFileFn
|
||||
readFileFn = fn
|
||||
t.Cleanup(func() {
|
||||
readFileFn = original
|
||||
})
|
||||
}
|
||||
@@ -1,104 +0,0 @@
//go:build unix || darwin || linux
// +build unix darwin linux

package main

import (
	"errors"
	"fmt"
	"os"
	"strconv"
	"strings"
	"syscall"
	"time"
)

var findProcess = os.FindProcess
var readFileFn = os.ReadFile

// isProcessRunning returns true if a process with the given pid is running on Unix-like systems.
func isProcessRunning(pid int) bool {
	if pid <= 0 {
		return false
	}

	proc, err := findProcess(pid)
	if err != nil || proc == nil {
		return false
	}

	err = proc.Signal(syscall.Signal(0))
	if err != nil && (errors.Is(err, syscall.ESRCH) || errors.Is(err, os.ErrProcessDone)) {
		return false
	}
	return true
}

// getProcessStartTime returns the start time of a process on Unix-like systems.
// Returns zero time if the start time cannot be determined.
func getProcessStartTime(pid int) time.Time {
	if pid <= 0 {
		return time.Time{}
	}

	// Read /proc/<pid>/stat to get process start time
	statPath := fmt.Sprintf("/proc/%d/stat", pid)
	data, err := readFileFn(statPath)
	if err != nil {
		return time.Time{}
	}

	// Parse stat file: fields are space-separated, but comm (field 2) can contain spaces
	// Find the last ')' to skip comm field safely
	content := string(data)
	lastParen := strings.LastIndex(content, ")")
	if lastParen == -1 {
		return time.Time{}
	}

	fields := strings.Fields(content[lastParen+1:])
	if len(fields) < 20 {
		return time.Time{}
	}

	// Field 22 (index 19 after comm) is starttime in clock ticks since boot
	startTicks, err := strconv.ParseUint(fields[19], 10, 64)
	if err != nil {
		return time.Time{}
	}

	// Get system boot time
	bootTime := getBootTime()
	if bootTime.IsZero() {
		return time.Time{}
	}

	// Convert ticks to duration (typically 100 ticks/sec on most systems)
	ticksPerSec := uint64(100) // sysconf(_SC_CLK_TCK), typically 100
	startTime := bootTime.Add(time.Duration(startTicks/ticksPerSec) * time.Second)

	return startTime
}

// getBootTime returns the system boot time by reading /proc/stat.
func getBootTime() time.Time {
	data, err := readFileFn("/proc/stat")
	if err != nil {
		return time.Time{}
	}

	lines := strings.Split(string(data), "\n")
	for _, line := range lines {
		if strings.HasPrefix(line, "btime ") {
			fields := strings.Fields(line)
			if len(fields) >= 2 {
				bootSec, err := strconv.ParseInt(fields[1], 10, 64)
				if err == nil {
					return time.Unix(bootSec, 0)
				}
			}
		}
	}

	return time.Time{}
}
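isProcessRunning only says whether some process currently holds the pid; combining it with getProcessStartTime is what lets a caller notice pid reuse. A small usage sketch (the persisted recordedAt timestamp is hypothetical, standing in for whatever the caller stored when it first saw the pid):

```go
// Sketch: decide whether a previously recorded pid still refers to the same
// process. recordedAt is a hypothetical start time saved by the caller earlier.
func isSameProcessSketch(pid int, recordedAt time.Time) bool {
	if !isProcessRunning(pid) {
		return false
	}
	started := getProcessStartTime(pid)
	if started.IsZero() || recordedAt.IsZero() {
		// Start time unavailable (e.g. /proc missing); fall back to the pid check alone.
		return true
	}
	// Tolerate small differences: start times are derived from whole clock ticks.
	diff := started.Sub(recordedAt)
	if diff < 0 {
		diff = -diff
	}
	return diff < 2*time.Second
}
```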
@@ -1,87 +0,0 @@
//go:build windows
// +build windows

package main

import (
	"errors"
	"os"
	"syscall"
	"time"
	"unsafe"
)

const (
	processQueryLimitedInformation = 0x1000
	stillActive                    = 259 // STILL_ACTIVE exit code
)

var (
	findProcess      = os.FindProcess
	kernel32         = syscall.NewLazyDLL("kernel32.dll")
	getProcessTimes  = kernel32.NewProc("GetProcessTimes")
	fileTimeToUnixFn = fileTimeToUnix
)

// isProcessRunning returns true if a process with the given pid is running on Windows.
func isProcessRunning(pid int) bool {
	if pid <= 0 {
		return false
	}

	if _, err := findProcess(pid); err != nil {
		return false
	}

	handle, err := syscall.OpenProcess(processQueryLimitedInformation, false, uint32(pid))
	if err != nil {
		if errors.Is(err, syscall.ERROR_ACCESS_DENIED) {
			return true
		}
		return false
	}
	defer syscall.CloseHandle(handle)

	var exitCode uint32
	if err := syscall.GetExitCodeProcess(handle, &exitCode); err != nil {
		return true
	}

	return exitCode == stillActive
}

// getProcessStartTime returns the start time of a process on Windows.
// Returns zero time if the start time cannot be determined.
func getProcessStartTime(pid int) time.Time {
	if pid <= 0 {
		return time.Time{}
	}

	handle, err := syscall.OpenProcess(processQueryLimitedInformation, false, uint32(pid))
	if err != nil {
		return time.Time{}
	}
	defer syscall.CloseHandle(handle)

	var creationTime, exitTime, kernelTime, userTime syscall.Filetime
	ret, _, _ := getProcessTimes.Call(
		uintptr(handle),
		uintptr(unsafe.Pointer(&creationTime)),
		uintptr(unsafe.Pointer(&exitTime)),
		uintptr(unsafe.Pointer(&kernelTime)),
		uintptr(unsafe.Pointer(&userTime)),
	)

	if ret == 0 {
		return time.Time{}
	}

	return fileTimeToUnixFn(creationTime)
}

// fileTimeToUnix converts Windows FILETIME to Unix time.
func fileTimeToUnix(ft syscall.Filetime) time.Time {
	// FILETIME is 100-nanosecond intervals since January 1, 1601 UTC
	nsec := ft.Nanoseconds()
	return time.Unix(0, nsec)
}
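The conversion works because syscall.Filetime.Nanoseconds() already rebases the 1601-based FILETIME onto the Unix epoch, so time.Unix(0, nsec) is all that is left to do. A quick round-trip sketch against the standard library's inverse conversion (Windows-only, and only an illustration of the epoch handling):

```go
// Sketch: fileTimeToUnix should invert syscall.NsecToFiletime, modulo the
// 100-nanosecond FILETIME resolution.
func fileTimeRoundTripSketch() bool {
	want := time.Now().Truncate(100 * time.Nanosecond)
	ft := syscall.NsecToFiletime(want.UnixNano())
	return fileTimeToUnix(ft).Equal(want)
}
```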
@@ -1,126 +0,0 @@
package main

import (
	"os"
	"path/filepath"
	"strings"
)

const (
	defaultWrapperName = "codeagent-wrapper"
	legacyWrapperName  = "codex-wrapper"
)

var executablePathFn = os.Executable

func normalizeWrapperName(path string) string {
	if path == "" {
		return ""
	}

	base := filepath.Base(path)
	base = strings.TrimSuffix(base, ".exe") // tolerate Windows executables

	switch base {
	case defaultWrapperName, legacyWrapperName:
		return base
	default:
		return ""
	}
}

// currentWrapperName resolves the wrapper name based on the invoked binary.
// Only known names are honored to avoid leaking build/test binary names into logs.
func currentWrapperName() string {
	if len(os.Args) == 0 {
		return defaultWrapperName
	}

	if name := normalizeWrapperName(os.Args[0]); name != "" {
		return name
	}

	execPath, err := executablePathFn()
	if err == nil {
		if name := normalizeWrapperName(execPath); name != "" {
			return name
		}

		if resolved, err := filepath.EvalSymlinks(execPath); err == nil {
			if name := normalizeWrapperName(resolved); name != "" {
				return name
			}
			if alias := resolveAlias(execPath, resolved); alias != "" {
				return alias
			}
		}

		if alias := resolveAlias(execPath, ""); alias != "" {
			return alias
		}
	}

	return defaultWrapperName
}

// logPrefixes returns the set of accepted log name prefixes, including the
// current wrapper name and legacy aliases.
func logPrefixes() []string {
	prefixes := []string{currentWrapperName(), defaultWrapperName, legacyWrapperName}
	seen := make(map[string]struct{}, len(prefixes))
	var unique []string
	for _, prefix := range prefixes {
		if prefix == "" {
			continue
		}
		if _, ok := seen[prefix]; ok {
			continue
		}
		seen[prefix] = struct{}{}
		unique = append(unique, prefix)
	}
	return unique
}

// primaryLogPrefix returns the preferred filename prefix for log files.
// Defaults to the current wrapper name when available, otherwise falls back
// to the canonical default name.
func primaryLogPrefix() string {
	prefixes := logPrefixes()
	if len(prefixes) == 0 {
		return defaultWrapperName
	}
	return prefixes[0]
}

func resolveAlias(execPath string, target string) string {
	if execPath == "" {
		return ""
	}

	dir := filepath.Dir(execPath)
	for _, candidate := range []string{defaultWrapperName, legacyWrapperName} {
		aliasPath := filepath.Join(dir, candidate)
		info, err := os.Lstat(aliasPath)
		if err != nil {
			continue
		}
		if info.Mode()&os.ModeSymlink == 0 {
			continue
		}

		resolved, err := filepath.EvalSymlinks(aliasPath)
		if err != nil {
			continue
		}
		if target != "" && resolved != target {
			continue
		}

		if name := normalizeWrapperName(aliasPath); name != "" {
			return name
		}
	}

	return ""
}
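normalizeWrapperName is intentionally strict: any basename other than the two known wrapper names collapses to the empty string, which is what lets currentWrapperName fall through to its other strategies. A few illustrative calls (the paths are made up):

```go
func wrapperNameExamples() {
	_ = normalizeWrapperName("/usr/local/bin/codeagent-wrapper") // "codeagent-wrapper"
	_ = normalizeWrapperName("codex-wrapper.exe")                // "codex-wrapper" (".exe" is tolerated)
	_ = normalizeWrapperName("/tmp/go-build/b001/main.test")     // "" (unknown names are rejected)
}
```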
@@ -1,50 +0,0 @@
package main

import (
	"os"
	"path/filepath"
	"testing"
)

func TestCurrentWrapperNameFallsBackToExecutable(t *testing.T) {
	defer resetTestHooks()

	tempDir := t.TempDir()
	execPath := filepath.Join(tempDir, "codeagent-wrapper")
	if err := os.WriteFile(execPath, []byte("#!/bin/true\n"), 0o755); err != nil {
		t.Fatalf("failed to write fake binary: %v", err)
	}

	os.Args = []string{filepath.Join(tempDir, "custom-name")}
	executablePathFn = func() (string, error) {
		return execPath, nil
	}

	if got := currentWrapperName(); got != defaultWrapperName {
		t.Fatalf("currentWrapperName() = %q, want %q", got, defaultWrapperName)
	}
}

func TestCurrentWrapperNameDetectsLegacyAliasSymlink(t *testing.T) {
	defer resetTestHooks()

	tempDir := t.TempDir()
	execPath := filepath.Join(tempDir, "wrapper")
	aliasPath := filepath.Join(tempDir, legacyWrapperName)

	if err := os.WriteFile(execPath, []byte("#!/bin/true\n"), 0o755); err != nil {
		t.Fatalf("failed to write fake binary: %v", err)
	}
	if err := os.Symlink(execPath, aliasPath); err != nil {
		t.Fatalf("failed to create alias: %v", err)
	}

	os.Args = []string{filepath.Join(tempDir, "unknown-runner")}
	executablePathFn = func() (string, error) {
		return execPath, nil
	}

	if got := currentWrapperName(); got != legacyWrapperName {
		t.Fatalf("currentWrapperName() = %q, want %q", got, legacyWrapperName)
	}
}
config.json (112 changed lines)
@@ -93,7 +93,7 @@
      ]
    },
    "essentials": {
      "enabled": true,
      "enabled": false,
      "description": "Core development commands and utilities",
      "operations": [
        {
@@ -108,6 +108,116 @@
          "description": "Copy development commands documentation"
        }
      ]
    },
    "omo": {
      "enabled": false,
      "description": "OmO multi-agent orchestration with Sisyphus coordinator",
      "operations": [
        {
          "type": "copy_file",
          "source": "skills/omo/SKILL.md",
          "target": "skills/omo/SKILL.md",
          "description": "Install omo skill"
        },
        {
          "type": "copy_file",
          "source": "skills/omo/references/oracle.md",
          "target": "skills/omo/references/oracle.md",
          "description": "Install oracle agent prompt"
        },
        {
          "type": "copy_file",
          "source": "skills/omo/references/librarian.md",
          "target": "skills/omo/references/librarian.md",
          "description": "Install librarian agent prompt"
        },
        {
          "type": "copy_file",
          "source": "skills/omo/references/explore.md",
          "target": "skills/omo/references/explore.md",
          "description": "Install explore agent prompt"
        },
        {
          "type": "copy_file",
          "source": "skills/omo/references/frontend-ui-ux-engineer.md",
          "target": "skills/omo/references/frontend-ui-ux-engineer.md",
          "description": "Install frontend-ui-ux-engineer agent prompt"
        },
        {
          "type": "copy_file",
          "source": "skills/omo/references/document-writer.md",
          "target": "skills/omo/references/document-writer.md",
          "description": "Install document-writer agent prompt"
        },
        {
          "type": "copy_file",
          "source": "skills/omo/references/develop.md",
          "target": "skills/omo/references/develop.md",
          "description": "Install develop agent prompt"
        }
      ]
    },
    "sparv": {
      "enabled": false,
      "description": "SPARV workflow (Specify→Plan→Act→Review→Vault) with 10-point gate",
      "operations": [
        {
          "type": "copy_dir",
          "source": "skills/sparv",
          "target": "skills/sparv",
          "description": "Install sparv skill with all scripts and hooks"
        }
      ]
    },
    "feature-dev": {
      "enabled": false,
      "description": "7-phase feature development workflow with codeagent orchestration",
      "operations": [
        {
          "type": "copy_dir",
          "source": "skills/feature-dev",
          "target": "skills/feature-dev",
          "description": "Install feature-dev skill with hooks"
        }
      ]
    },
    "course": {
      "enabled": false,
      "description": "Course development workflow, including the dev, product-requirements, and test-cases skills",
      "operations": [
        {
          "type": "copy_dir",
          "source": "skills/dev",
          "target": "skills/dev",
          "description": "Install dev skill with agents"
        },
        {
          "type": "copy_file",
          "source": "skills/product-requirements/SKILL.md",
          "target": "skills/product-requirements/SKILL.md",
          "description": "Install product-requirements skill"
        },
        {
          "type": "copy_dir",
          "source": "skills/test-cases",
          "target": "skills/test-cases",
          "description": "Install test-cases skill with references"
        },
        {
          "type": "copy_file",
          "source": "skills/codeagent/SKILL.md",
          "target": "skills/codeagent/SKILL.md",
          "description": "Install codeagent skill"
        },
        {
          "type": "run_command",
          "command": "bash install.sh",
          "description": "Install codeagent-wrapper binary",
          "env": {
            "INSTALL_DIR": "${install_dir}"
          }
        }
      ]
    }
  }
}
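Every module entry above follows the same shape: an enabled flag, a description, and a list of copy or run operations. A sketch of that schema as Go structs (field names mirror the JSON keys; the installer's real types are not part of this diff, so treat the exact shape as an assumption):

```go
// Sketch of the config.json module schema shown above.
type Operation struct {
	Type        string            `json:"type"` // "copy_file", "copy_dir", or "run_command"
	Source      string            `json:"source,omitempty"`
	Target      string            `json:"target,omitempty"`
	Command     string            `json:"command,omitempty"`
	Description string            `json:"description"`
	Env         map[string]string `json:"env,omitempty"` // e.g. {"INSTALL_DIR": "${install_dir}"}
}

type Module struct {
	Enabled     bool        `json:"enabled"`
	Description string      `json:"description"`
	Operations  []Operation `json:"operations"`
}
```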
dev-workflow/.claude-plugin/plugin.json (new file, 9 lines)
@@ -0,0 +1,9 @@
{
  "name": "dev",
  "description": "Lightweight development workflow with requirements clarification, parallel codex execution, and mandatory 90% test coverage",
  "version": "5.6.1",
  "author": {
    "name": "cexll",
    "email": "cexll@cexll.com"
  }
}
@@ -9,42 +9,56 @@ A freshly designed lightweight development workflow with no legacy baggage, focu
```
/dev trigger
  ↓
AskUserQuestion (backend selection)
  ↓
AskUserQuestion (requirements clarification)
  ↓
codeagent analysis (plan mode + UI auto-detection)
codeagent analysis (plan mode + task typing + UI auto-detection)
  ↓
dev-plan-generator (create dev doc)
  ↓
codeagent concurrent development (2–5 tasks, backend split)
codeagent concurrent development (2–5 tasks, backend routing)
  ↓
codeagent testing & verification (≥90% coverage)
  ↓
Done (generate summary)
```

## The 6 Steps
## Step 0 + The 6 Steps

### 0. Select Allowed Backends (FIRST ACTION)
- Use **AskUserQuestion** with multiSelect to ask which backends are allowed for this run
- Options (user can select multiple):
  - `codex` - Stable, high quality, best cost-performance (default for most tasks)
  - `claude` - Fast, lightweight (for quick fixes and config changes)
  - `gemini` - UI/UX specialist (for frontend styling and components)
- If user selects ONLY `codex`, ALL subsequent tasks must use `codex` (including UI/quick-fix)

### 1. Clarify Requirements
- Use **AskUserQuestion** to ask the user directly
- No scoring system, no complex logic
- 2–3 rounds of Q&A until the requirement is clear

### 2. codeagent Analysis & UI Detection
### 2. codeagent Analysis + Task Typing + UI Detection
- Call codeagent to analyze the request in plan mode style
- Extract: core functions, technical points, task list (2–5 items)
- For each task, assign exactly one type: `default` / `ui` / `quick-fix`
- UI auto-detection: needs UI work when the task involves style assets (.css, .scss, styled-components, CSS modules, tailwindcss) OR frontend component files (.tsx, .jsx, .vue); output yes/no plus evidence

### 3. Generate Dev Doc
- Call the **dev-plan-generator** agent
- Produce a single `dev-plan.md`
- Append a dedicated UI task when Step 2 marks `needs_ui: true`
- Include: task breakdown, file scope, dependencies, test commands
- Include: task breakdown, `type`, file scope, dependencies, test commands

### 4. Concurrent Development
- Work from the task list in dev-plan.md
- Use codeagent per task with explicit backend selection:
  - Backend/API/DB tasks → `--backend codex` (default)
  - UI/style/component tasks → `--backend gemini` (enforced)
- Route backend per task type (with user constraints + fallback):
  - `default` → `codex`
  - `ui` → `gemini` (enforced when allowed)
  - `quick-fix` → `claude`
  - Missing `type` → treat as `default`
- If the preferred backend is not allowed, fall back to an allowed backend by priority: `codex` → `claude` → `gemini` (see the routing sketch after this step)
- Independent tasks → run in parallel
- Conflicting tasks → run serially
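A sketch of the routing rule just described, expressed as a small Go helper (the names are illustrative; in the workflow itself the orchestrator applies these rules as prose, not code):

```go
// routeBackend picks a backend per the rules above: type decides the
// preferred backend, the Step 0 selection constrains it, and codex → claude
// → gemini is the fallback order when the preferred one is not allowed.
func routeBackend(taskType string, allowed map[string]bool) string {
	if len(allowed) == 1 && allowed["codex"] {
		return "codex" // user selected only codex: everything runs on codex
	}
	preferred := map[string]string{
		"default":   "codex",
		"ui":        "gemini",
		"quick-fix": "claude",
	}
	want, ok := preferred[taskType]
	if !ok {
		want = "codex" // a missing or unknown type is treated as default
	}
	if allowed[want] {
		return want
	}
	for _, b := range []string{"codex", "claude", "gemini"} {
		if allowed[b] {
			return b
		}
	}
	return "codex" // nothing selected; fall back to the overall default
}
```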
@@ -65,7 +79,7 @@ Done (generate summary)
/dev "Implement user login with email + password"
```

**No options**, fixed workflow, works out of the box.
No CLI flags required; the workflow starts with an interactive backend selection.

## Output Structure

@@ -80,14 +94,14 @@ Only one file—minimal and clear.

### Tools
- **AskUserQuestion**: interactive requirement clarification
- **codeagent skill**: analysis, development, testing; supports `--backend` for codex (default) or gemini (UI)
- **codeagent skill**: analysis, development, testing; supports `--backend` for `codex` / `claude` / `gemini`
- **dev-plan-generator agent**: generate dev doc (subagent via Task tool, saves context)

## UI Auto-Detection & Backend Routing
## Backend Selection & Routing
- **Step 0**: user selects allowed backends; if only `codex` is selected, all tasks use codex
- **UI detection standard**: style files (.css, .scss, styled-components, CSS modules, tailwindcss) OR frontend component code (.tsx, .jsx, .vue) trigger `needs_ui: true`
- **Flow impact**: Step 2 auto-detects UI work; Step 3 appends a separate UI task in `dev-plan.md` when detected
- **Backend split**: backend/API tasks use codex backend (default); UI tasks force gemini backend
- **Implementation**: Orchestrator invokes codeagent skill with appropriate backend parameter per task type
- **Task type field**: each task in `dev-plan.md` must have `type: default|ui|quick-fix`
- **Routing**: `default`→codex, `ui`→gemini, `quick-fix`→claude; if disallowed, fall back to an allowed backend by priority: codex→claude→gemini

## Key Features

@@ -102,9 +116,9 @@ Only one file—minimal and clear.
- Steps are straightforward

### ✅ Concurrency
- 2–5 tasks in parallel
- Tasks split based on natural functional boundaries
- Auto-detect dependencies and conflicts
- codeagent executes independently
- codeagent executes independently with the optimal backend

### ✅ Quality Assurance
- Enforces 90% coverage
@@ -117,6 +131,10 @@ Only one file—minimal and clear.
# Trigger
/dev "Add user login feature"

# Step 0: Select backends
Q: Which backends are allowed? (multiSelect)
A: Selected: codex, claude

# Step 1: Clarify requirements
Q: What login methods are supported?
A: Email + password
@@ -126,18 +144,18 @@ A: Yes, use JWT token
# Step 2: codeagent analysis
Output:
- Core: email/password login + JWT auth
- Task 1: Backend API
- Task 2: Password hashing
- Task 3: Frontend form
- Task 1: Backend API (type=default)
- Task 2: Password hashing (type=default)
- Task 3: Frontend form (type=ui)
UI detection: needs_ui = true (tailwindcss classes in frontend form)

# Step 3: Generate doc
dev-plan.md generated with backend + UI tasks ✓
dev-plan.md generated with typed tasks ✓

# Step 4-5: Concurrent development (backend codex, UI gemini)
# Step 4-5: Concurrent development (routing + fallback)
[task-1] Backend API (codex) → tests → 92% ✓
[task-2] Password hashing (codex) → tests → 95% ✓
[task-3] Frontend form (gemini) → tests → 91% ✓
[task-3] Frontend form (fallback to codex; gemini not allowed) → tests → 91% ✓
```

## Directory Structure

@@ -12,7 +12,7 @@ You are a specialized Development Plan Document Generator. Your sole responsibil

You receive context from an orchestrator including:
- Feature requirements description
- codeagent analysis results (feature highlights, task decomposition, UI detection flag)
- codeagent analysis results (feature highlights, task decomposition, UI detection flag, and task typing hints)
- Feature name (in kebab-case format)

Your output is a single file: `./.claude/specs/{feature_name}/dev-plan.md`
@@ -29,6 +29,7 @@ Your output is a single file: `./.claude/specs/{feature_name}/dev-plan.md`

### Task 1: [Task Name]
- **ID**: task-1
- **type**: default|ui|quick-fix
- **Description**: [What needs to be done]
- **File Scope**: [Directories or files involved, e.g., src/auth/**, tests/auth/]
- **Dependencies**: [None or depends on task-x]
@@ -38,7 +39,7 @@ Your output is a single file: `./.claude/specs/{feature_name}/dev-plan.md`
### Task 2: [Task Name]
...

(2-5 tasks)
(Tasks based on natural functional boundaries, typically 2-5)

## Acceptance Criteria
- [ ] Feature point 1
@@ -53,9 +54,13 @@ Your output is a single file: `./.claude/specs/{feature_name}/dev-plan.md`

## Generation Rules You Must Enforce

1. **Task Count**: Generate 2-5 tasks (no more, no less unless the feature is extremely simple or complex)
1. **Task Count**: Generate tasks based on natural functional boundaries (no artificial limits)
   - Typical range: 2-5 tasks
   - Quality over quantity: prefer fewer well-scoped tasks over excessive fragmentation
   - Each task should be independently completable by one agent
2. **Task Requirements**: Each task MUST include:
   - Clear ID (task-1, task-2, etc.)
   - A single task type field: `type: default|ui|quick-fix`
   - Specific description of what needs to be done
   - Explicit file scope (directories or files affected)
   - Dependency declaration ("None" or "depends on task-x")
@@ -67,18 +72,23 @@ Your output is a single file: `./.claude/specs/{feature_name}/dev-plan.md`

## Your Workflow

1. **Analyze Input**: Review the requirements description and codeagent analysis results (including `needs_ui` flag if present)
1. **Analyze Input**: Review the requirements description and codeagent analysis results (including `needs_ui` and any task typing hints)
2. **Identify Tasks**: Break down the feature into 2-5 logical, independent tasks
3. **Determine Dependencies**: Map out which tasks depend on others (minimize dependencies)
4. **Specify Testing**: For each task, define the exact test command and coverage requirements
5. **Define Acceptance**: List concrete, measurable acceptance criteria including the 90% coverage requirement
6. **Document Technical Points**: Note key technical decisions and constraints
7. **Write File**: Use the Write tool to create `./.claude/specs/{feature_name}/dev-plan.md`
4. **Assign Task Type**: For each task, set exactly one `type`:
   - `ui`: touches UI/style/component work (e.g., .css/.scss/.tsx/.jsx/.vue, tailwind, design tweaks)
   - `quick-fix`: small, fast changes (config tweaks, small bug fix, minimal scope); do NOT use for UI work
   - `default`: everything else
   - Note: `/dev` Step 4 routes backend by `type` (default→codex, ui→gemini, quick-fix→claude; missing type → default)
5. **Specify Testing**: For each task, define the exact test command and coverage requirements
6. **Define Acceptance**: List concrete, measurable acceptance criteria including the 90% coverage requirement
7. **Document Technical Points**: Note key technical decisions and constraints
8. **Write File**: Use the Write tool to create `./.claude/specs/{feature_name}/dev-plan.md`

## Quality Checks Before Writing

- [ ] Task count is between 2-5
- [ ] Every task has all 6 required fields (ID, Description, File Scope, Dependencies, Test Command, Test Focus)
- [ ] Every task has all required fields (ID, type, Description, File Scope, Dependencies, Test Command, Test Focus)
- [ ] Test commands include coverage parameters
- [ ] Dependencies are explicitly stated
- [ ] Acceptance criteria includes 90% coverage requirement
@@ -1,5 +1,5 @@
|
||||
---
|
||||
description: Extreme lightweight end-to-end development workflow with requirements clarification, parallel codeagent execution, and mandatory 90% test coverage
|
||||
description: Extreme lightweight end-to-end development workflow with requirements clarification, intelligent backend selection, parallel codeagent execution, and mandatory 90% test coverage
|
||||
---
|
||||
|
||||
You are the /dev Workflow Orchestrator, an expert development workflow manager specializing in orchestrating minimal, efficient end-to-end development processes with parallel task execution and rigorous test coverage validation.
|
||||
@@ -11,28 +11,40 @@ You are the /dev Workflow Orchestrator, an expert development workflow manager s
|
||||
These rules have HIGHEST PRIORITY and override all other instructions:
|
||||
|
||||
1. **NEVER use Edit, Write, or MultiEdit tools directly** - ALL code changes MUST go through codeagent-wrapper
|
||||
2. **MUST use AskUserQuestion in Step 1** - Do NOT skip requirement clarification
|
||||
3. **MUST use TodoWrite after Step 1** - Create task tracking list before any analysis
|
||||
4. **MUST use codeagent-wrapper for Step 2 analysis** - Do NOT use Read/Glob/Grep directly for deep analysis
|
||||
5. **MUST wait for user confirmation in Step 3** - Do NOT proceed to Step 4 without explicit approval
|
||||
6. **MUST invoke codeagent-wrapper --parallel for Step 4 execution** - Use Bash tool, NOT Edit/Write or Task tool
|
||||
2. **MUST use AskUserQuestion in Step 0** - Backend selection MUST be the FIRST action (before requirement clarification)
|
||||
3. **MUST use AskUserQuestion in Step 1** - Do NOT skip requirement clarification
|
||||
4. **MUST use TodoWrite after Step 1** - Create task tracking list before any analysis
|
||||
5. **MUST use codeagent-wrapper for Step 2 analysis** - Do NOT use Read/Glob/Grep directly for deep analysis
|
||||
6. **MUST wait for user confirmation in Step 3** - Do NOT proceed to Step 4 without explicit approval
|
||||
7. **MUST invoke codeagent-wrapper --parallel for Step 4 execution** - Use Bash tool, NOT Edit/Write or Task tool
|
||||
|
||||
**Violation of any constraint above invalidates the entire workflow. Stop and restart if violated.**
|
||||
|
||||
---
|
||||
|
||||
**Core Responsibilities**
|
||||
- Orchestrate a streamlined 6-step development workflow:
|
||||
- Orchestrate a streamlined 7-step development workflow (Step 0 + Step 1–6):
|
||||
0. Backend selection (user constrained)
|
||||
1. Requirement clarification through targeted questioning
|
||||
2. Technical analysis using codeagent
|
||||
2. Technical analysis using codeagent-wrapper
|
||||
3. Development documentation generation
|
||||
4. Parallel development execution
|
||||
4. Parallel development execution (backend routing per task type)
|
||||
5. Coverage validation (≥90% requirement)
|
||||
6. Completion summary
|
||||
|
||||
**Workflow Execution**
|
||||
- **Step 0: Backend Selection [MANDATORY - FIRST ACTION]**
|
||||
- MUST use AskUserQuestion tool as the FIRST action with multiSelect enabled
|
||||
- Ask which backends are allowed for this /dev run
|
||||
- Options (user can select multiple):
|
||||
- `codex` - Stable, high quality, best cost-performance (default for most tasks)
|
||||
- `claude` - Fast, lightweight (for quick fixes and config changes)
|
||||
- `gemini` - UI/UX specialist (for frontend styling and components)
|
||||
- Store the selected backends as `allowed_backends` set for routing in Step 4
|
||||
- Special rule: if user selects ONLY `codex`, then ALL subsequent tasks (including UI/quick-fix) MUST use `codex` (no exceptions)
|
||||
|
||||
- **Step 1: Requirement Clarification [MANDATORY - DO NOT SKIP]**
|
||||
- MUST use AskUserQuestion tool as the FIRST action - no exceptions
|
||||
- MUST use AskUserQuestion tool
|
||||
- Focus questions on functional boundaries, inputs/outputs, constraints, testing, and required unit-test coverage levels
|
||||
- Iterate 2-3 rounds until clear; rely on judgment; keep questions concise
|
||||
- After clarification complete: MUST use TodoWrite to create task tracking list with workflow steps
|
||||
@@ -43,7 +55,10 @@ These rules have HIGHEST PRIORITY and override all other instructions:
|
||||
|
||||
**How to invoke for analysis**:
|
||||
```bash
|
||||
codeagent-wrapper --backend codex - <<'EOF'
|
||||
# analysis_backend selection:
|
||||
# - prefer codex if it is in allowed_backends
|
||||
# - otherwise pick the first backend in allowed_backends
|
||||
codeagent-wrapper --backend {analysis_backend} - <<'EOF'
|
||||
Analyze the codebase for implementing [feature name].
|
||||
|
||||
Requirements:
|
||||
@@ -54,8 +69,9 @@ These rules have HIGHEST PRIORITY and override all other instructions:
|
||||
1. Explore codebase structure and existing patterns
|
||||
2. Evaluate implementation options with trade-offs
|
||||
3. Make architectural decisions
|
||||
4. Break down into 2-5 parallelizable tasks with dependencies
|
||||
5. Determine if UI work is needed (check for .css/.tsx/.vue files)
|
||||
4. Break down into 2-5 parallelizable tasks with dependencies and file scope
|
||||
5. Classify each task with a single `type`: `default` / `ui` / `quick-fix`
|
||||
6. Determine if UI work is needed (check for .css/.tsx/.vue files)
|
||||
|
||||
Output the analysis following the structure below.
|
||||
EOF
|
||||
@@ -76,7 +92,7 @@ These rules have HIGHEST PRIORITY and override all other instructions:
|
||||
2. **Identify Existing Patterns**: Find how similar features are implemented, reuse conventions
|
||||
3. **Evaluate Options**: When multiple approaches exist, list trade-offs (complexity, performance, security, maintainability)
|
||||
4. **Make Architectural Decisions**: Choose patterns, APIs, data models with justification
|
||||
5. **Design Task Breakdown**: Produce 2-5 parallelizable tasks with file scope and dependencies
|
||||
5. **Design Task Breakdown**: Produce parallelizable tasks based on natural functional boundaries with file scope and dependencies
|
||||
|
||||
**Analysis Output Structure**:
|
||||
```
|
||||
@@ -93,7 +109,7 @@ These rules have HIGHEST PRIORITY and override all other instructions:
|
||||
[API design, data models, architecture choices made]
|
||||
|
||||
## Task Breakdown
|
||||
[2-5 tasks with: ID, description, file scope, dependencies, test command]
|
||||
[2-5 tasks with: ID, description, file scope, dependencies, test command, type(default|ui|quick-fix)]
|
||||
|
||||
## UI Determination
|
||||
needs_ui: [true/false]
|
||||
@@ -107,27 +123,37 @@ These rules have HIGHEST PRIORITY and override all other instructions:
|
||||
|
||||
- **Step 3: Generate Development Documentation**
|
||||
- invoke agent dev-plan-generator
|
||||
- When creating `dev-plan.md`, append a dedicated UI task if Step 2 marked `needs_ui: true`
|
||||
- When creating `dev-plan.md`, ensure every task has `type: default|ui|quick-fix`
|
||||
- Append a dedicated UI task if Step 2 marked `needs_ui: true` but no UI task exists
|
||||
- Output a brief summary of dev-plan.md:
|
||||
- Number of tasks and their IDs
|
||||
- Task type for each task
|
||||
- File scope for each task
|
||||
- Dependencies between tasks
|
||||
- Test commands
|
||||
- Use AskUserQuestion to confirm with user:
|
||||
- Question: "Proceed with this development plan?" (if UI work is detected, state that UI tasks will use the gemini backend)
|
||||
- Question: "Proceed with this development plan?" (state backend routing rules and any forced fallback due to allowed_backends)
|
||||
- Options: "Confirm and execute" / "Need adjustments"
|
||||
- If user chooses "Need adjustments", return to Step 1 or Step 2 based on feedback
|
||||
|
||||
- **Step 4: Parallel Development Execution [CODEAGENT-WRAPPER ONLY - NO DIRECT EDITS]**
|
||||
- MUST use Bash tool to invoke `codeagent-wrapper --parallel` for ALL code changes
|
||||
- NEVER use Edit, Write, MultiEdit, or Task tools to modify code directly
|
||||
- Backend routing (must be deterministic and enforceable):
|
||||
- Task field: `type: default|ui|quick-fix` (missing → treat as `default`)
|
||||
- Preferred backend by type:
|
||||
- `default` → `codex`
|
||||
- `ui` → `gemini` (enforced when allowed)
|
||||
- `quick-fix` → `claude`
|
||||
- If user selected `仅 codex`: all tasks MUST use `codex`
|
||||
- Otherwise, if preferred backend is not in `allowed_backends`, fallback to the first available backend by priority: `codex` → `claude` → `gemini`
|
||||
- Build ONE `--parallel` config that includes all tasks in `dev-plan.md` and submit it once via Bash tool:
|
||||
```bash
|
||||
# One shot submission - wrapper handles topology + concurrency
|
||||
codeagent-wrapper --parallel <<'EOF'
|
||||
---TASK---
|
||||
id: [task-id-1]
|
||||
backend: codex
|
||||
backend: [routed-backend-from-type-and-allowed_backends]
|
||||
workdir: .
|
||||
dependencies: [optional, comma-separated ids]
|
||||
---CONTENT---
|
||||
@@ -139,7 +165,7 @@ These rules have HIGHEST PRIORITY and override all other instructions:
|
||||
|
||||
---TASK---
|
||||
id: [task-id-2]
|
||||
backend: gemini
|
||||
backend: [routed-backend-from-type-and-allowed_backends]
|
||||
workdir: .
|
||||
dependencies: [optional, comma-separated ids]
|
||||
---CONTENT---
|
||||
@@ -152,6 +178,7 @@ These rules have HIGHEST PRIORITY and override all other instructions:
|
||||
```
|
||||
- **Note**: Use `workdir: .` (current directory) for all tasks unless specific subdirectory is required
|
||||
- Execute independent tasks concurrently; serialize conflicting ones; track coverage reports
|
||||
- Backend is routed deterministically based on task `type`, no manual intervention needed
|
||||
|
||||
- **Step 5: Coverage Validation**
|
||||
- Validate each task’s coverage:
|
||||
@@ -168,11 +195,13 @@ These rules have HIGHEST PRIORITY and override all other instructions:
|
||||
- Circular dependencies: codeagent-wrapper will detect and fail with error; revise task breakdown to remove cycles
|
||||
- Missing dependencies: Ensure all task IDs referenced in `dependencies` field exist
|
||||
- **Parallel execution timeout**: Individual tasks timeout after 2 hours (configurable via CODEX_TIMEOUT); failed tasks can be retried individually
|
||||
- **Backend unavailable**: If codex/claude/gemini CLI not found, fail immediately with clear error message
|
||||
- **Backend unavailable**: If a routed backend is unavailable, fallback to another backend in `allowed_backends` (priority: codex → claude → gemini); if none works, fail with a clear error message
|
||||
|
||||
**Quality Standards**

- Code coverage ≥90%
- Tasks split along natural functional boundaries, yielding 2-5 genuinely parallelizable tasks
- Each task has exactly one `type: default|ui|quick-fix`
- Backend routed by `type`: `default`→codex, `ui`→gemini, `quick-fix`→claude (with `allowed_backends` fallback)
- Documentation must be minimal yet actionable
- No verbose implementations; only essential code
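
The ≥90% coverage gate can be checked mechanically. A minimal sketch, assuming a Python project measured with coverage.py; the script is illustrative and not part of this repository:

```python
# Sketch: enforce the 90% line-coverage gate with coverage.py's --fail-under flag.
import subprocess
import sys

def coverage_gate(threshold: int = 90) -> int:
    subprocess.run(["coverage", "run", "-m", "pytest"], check=True)
    # "coverage report --fail-under=N" exits non-zero when total coverage is below N%.
    result = subprocess.run(["coverage", "report", f"--fail-under={threshold}"])
    return result.returncode

if __name__ == "__main__":
    sys.exit(coverage_gate())
```
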
@@ -1,44 +0,0 @@
{
  "name": "development-essentials",
  "source": "./",
  "description": "Essential development commands for coding, debugging, testing, optimization, and documentation",
  "version": "1.0.0",
  "author": {
    "name": "Claude Code Dev Workflows",
    "url": "https://github.com/cexll/myclaude"
  },
  "homepage": "https://github.com/cexll/myclaude",
  "repository": "https://github.com/cexll/myclaude",
  "license": "MIT",
  "keywords": [
    "code",
    "debug",
    "test",
    "optimize",
    "review",
    "bugfix",
    "refactor",
    "documentation"
  ],
  "category": "essentials",
  "strict": false,
  "commands": [
    "./commands/code.md",
    "./commands/debug.md",
    "./commands/test.md",
    "./commands/optimize.md",
    "./commands/review.md",
    "./commands/bugfix.md",
    "./commands/refactor.md",
    "./commands/docs.md",
    "./commands/ask.md",
    "./commands/think.md"
  ],
  "agents": [
    "./agents/code.md",
    "./agents/bugfix.md",
    "./agents/bugfix-verify.md",
    "./agents/optimize.md",
    "./agents/debug.md"
  ]
}
9
development-essentials/.claude-plugin/plugin.json
Normal file
@@ -0,0 +1,9 @@
{
  "name": "essentials",
  "description": "Essential development commands for coding, debugging, testing, optimization, and documentation",
  "version": "5.6.1",
  "author": {
    "name": "cexll",
    "email": "cexll@cexll.com"
  }
}
@@ -322,6 +322,8 @@ Error: dependency backend_1701234567 failed

| Variable | Default | Description |
|----------|---------|-------------|
| `CODEX_TIMEOUT` | 7200000 | Timeout in milliseconds |
| `CODEX_BYPASS_SANDBOX` | true | Bypass Codex sandbox/approval. Set `false` to disable |
| `CODEAGENT_SKIP_PERMISSIONS` | true | Skip Claude permission prompts. Set `false` to disable |
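
As an illustration of these variables, the snippet below launches `codeagent-wrapper --parallel` with a shorter timeout and the sandbox/permission prompts left enabled. It is a sketch only: the task-config file name is assumed, and the wrapper reads the task config from stdin as in the heredoc example above.

```python
# Sketch: launching codeagent-wrapper with overridden environment variables.
import os
import subprocess

env = dict(os.environ)
env["CODEX_TIMEOUT"] = str(30 * 60 * 1000)   # 30 minutes instead of the 2-hour default
env["CODEX_BYPASS_SANDBOX"] = "false"        # keep the Codex sandbox/approval flow
env["CODEAGENT_SKIP_PERMISSIONS"] = "false"  # keep Claude permission prompts

with open("parallel-tasks.txt", "rb") as tasks:  # illustrative task-config file
    subprocess.run(["codeagent-wrapper", "--parallel"], stdin=tasks, env=env, check=True)
```
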
## Troubleshooting
143
go.work.sum
Normal file
@@ -0,0 +1,143 @@
|
||||
cloud.google.com/go v0.112.1 h1:uJSeirPke5UNZHIb4SxfZklVSiWWVqW4oXlETwZziwM=
|
||||
cloud.google.com/go v0.112.1/go.mod h1:+Vbu+Y1UU+I1rjmzeMOb/8RfkKJK2Gyxi1X6jJCZLo4=
|
||||
cloud.google.com/go/compute v1.24.0 h1:phWcR2eWzRJaL/kOiJwfFsPs4BaKq1j6vnpZrc1YlVg=
|
||||
cloud.google.com/go/compute v1.24.0/go.mod h1:kw1/T+h/+tK2LJK0wiPPx1intgdAM3j/g3hFDlscY40=
|
||||
cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY=
|
||||
cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA=
|
||||
cloud.google.com/go/firestore v1.15.0 h1:/k8ppuWOtNuDHt2tsRV42yI21uaGnKDEQnRFeBpbFF8=
|
||||
cloud.google.com/go/firestore v1.15.0/go.mod h1:GWOxFXcv8GZUtYpWHw/w6IuYNux/BtmeVTMmjrm4yhk=
|
||||
cloud.google.com/go/iam v1.1.5 h1:1jTsCu4bcsNsE4iiqNT5SHwrDRCfRmIaaaVFhRveTJI=
|
||||
cloud.google.com/go/iam v1.1.5/go.mod h1:rB6P/Ic3mykPbFio+vo7403drjlgvoWfYpJhMXEbzv8=
|
||||
cloud.google.com/go/longrunning v0.5.5 h1:GOE6pZFdSrTb4KAiKnXsJBtlE6mEyaW44oKyMILWnOg=
|
||||
cloud.google.com/go/longrunning v0.5.5/go.mod h1:WV2LAxD8/rg5Z1cNW6FJ/ZpX4E4VnDnoTk0yawPBB7s=
|
||||
cloud.google.com/go/storage v1.35.1 h1:B59ahL//eDfx2IIKFBeT5Atm9wnNmj3+8xG/W4WB//w=
|
||||
cloud.google.com/go/storage v1.35.1/go.mod h1:M6M/3V/D3KpzMTJyPOR/HU6n2Si5QdaXYEsng2xgOs8=
|
||||
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
|
||||
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
|
||||
github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM=
|
||||
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
|
||||
github.com/coreos/go-systemd/v22 v22.5.0 h1:RrqgGjYQKalulkV8NGVIfkXQf6YYmOyiJKk8iXXhfZs=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.4 h1:wfIWP927BUkWJb2NmU/kNDYIBTh/ziUX91+lVfRxZq4=
|
||||
github.com/fatih/color v1.14.1 h1:qfhVLaG5s+nCROl1zJsZRxFeYrHLqWroPOQ8BWiNb4w=
|
||||
github.com/fatih/color v1.14.1/go.mod h1:2oHN61fhTpgcxD3TSWCgKDiH1+x4OiDVVGH8WlgGZGg=
|
||||
github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg=
|
||||
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||
github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ=
|
||||
github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||
github.com/godbus/dbus/v5 v5.0.4 h1:9349emZab16e7zQvpmsbtjc18ykshndd8y2PG3sgJbA=
|
||||
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
|
||||
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
|
||||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
|
||||
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
|
||||
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
|
||||
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
|
||||
github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
|
||||
github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
|
||||
github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4=
|
||||
github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs=
|
||||
github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0=
|
||||
github.com/googleapis/gax-go/v2 v2.12.3 h1:5/zPPDvw8Q1SuXjrqrZslrqT7dL/uJT2CQii/cLCKqA=
|
||||
github.com/googleapis/gax-go/v2 v2.12.3/go.mod h1:AKloxT6GtNbaLm8QTNSidHUVsHYcBHwWRvkNFJUQcS4=
|
||||
github.com/googleapis/google-cloud-go-testing v0.0.0-20210719221736-1c9a4c676720 h1:zC34cGQu69FG7qzJ3WiKW244WfhDC3xxYMeNOX2gtUQ=
|
||||
github.com/googleapis/google-cloud-go-testing v0.0.0-20210719221736-1c9a4c676720/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
|
||||
github.com/hashicorp/consul/api v1.28.2 h1:mXfkRHrpHN4YY3RqL09nXU1eHKLNiuAN4kHvDQ16k/8=
|
||||
github.com/hashicorp/consul/api v1.28.2/go.mod h1:KyzqzgMEya+IZPcD65YFoOVAgPpbfERu4I/tzG6/ueE=
|
||||
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
|
||||
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
|
||||
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
|
||||
github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c=
|
||||
github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
|
||||
github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc=
|
||||
github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
|
||||
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
|
||||
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
|
||||
github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc=
|
||||
github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
|
||||
github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=
|
||||
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
|
||||
github.com/hashicorp/serf v0.10.1 h1:Z1H2J60yRKvfDYAOZLd2MU0ND4AH/WDz7xYHDWQsIPY=
|
||||
github.com/hashicorp/serf v0.10.1/go.mod h1:yL2t6BqATOLGc5HF7qbFkTfXoPIY0WZdWHfEvMqbG+4=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/klauspost/compress v1.17.2 h1:RlWWUY/Dr4fL8qk9YG7DTZ7PDgME2V4csBXA8L/ixi4=
|
||||
github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
|
||||
github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8=
|
||||
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
|
||||
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
|
||||
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/nats-io/nats.go v1.34.0 h1:fnxnPCNiwIG5w08rlMcEKTUw4AV/nKyGCOJE8TdhSPk=
|
||||
github.com/nats-io/nats.go v1.34.0/go.mod h1:Ubdu4Nh9exXdSz0RVWRFBbRfrbSxOYd26oF0wkWclB8=
|
||||
github.com/nats-io/nkeys v0.4.7 h1:RwNJbbIdYCoClSDNY7QVKZlyb/wfT6ugvFCiKy6vDvI=
|
||||
github.com/nats-io/nkeys v0.4.7/go.mod h1:kqXRgRDPlGy7nGaEDMuYzmiJCIAAWDK0IMBtDmGD0nc=
|
||||
github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=
|
||||
github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
|
||||
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||
github.com/pkg/sftp v1.13.6 h1:JFZT4XbOU7l77xGSpOdW+pwIMqP044IyjXX6FGyEKFo=
|
||||
github.com/pkg/sftp v1.13.6/go.mod h1:tz1ryNURKu77RL+GuCzmoJYxQczL3wLNNpPWagdg4Qk=
|
||||
github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=
|
||||
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
||||
github.com/sagikazarmark/crypt v0.19.0 h1:WMyLTjHBo64UvNcWqpzY3pbZTYgnemZU8FBZigKc42E=
|
||||
github.com/sagikazarmark/crypt v0.19.0/go.mod h1:c6vimRziqqERhtSe0MhIvzE1w54FrCHtrXb5NH/ja78=
|
||||
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
|
||||
go.etcd.io/etcd/api/v3 v3.5.12 h1:W4sw5ZoU2Juc9gBWuLk5U6fHfNVyY1WC5g9uiXZio/c=
|
||||
go.etcd.io/etcd/api/v3 v3.5.12/go.mod h1:Ot+o0SWSyT6uHhA56al1oCED0JImsRiU9Dc26+C2a+4=
|
||||
go.etcd.io/etcd/client/pkg/v3 v3.5.12 h1:EYDL6pWwyOsylrQyLp2w+HkQ46ATiOvoEdMarindU2A=
|
||||
go.etcd.io/etcd/client/pkg/v3 v3.5.12/go.mod h1:seTzl2d9APP8R5Y2hFL3NVlD6qC/dOT+3kvrqPyTas4=
|
||||
go.etcd.io/etcd/client/v2 v2.305.12 h1:0m4ovXYo1CHaA/Mp3X/Fak5sRNIWf01wk/X1/G3sGKI=
|
||||
go.etcd.io/etcd/client/v2 v2.305.12/go.mod h1:aQ/yhsxMu+Oht1FOupSr60oBvcS9cKXHrzBpDsPTf9E=
|
||||
go.etcd.io/etcd/client/v3 v3.5.12 h1:v5lCPXn1pf1Uu3M4laUE2hp/geOTc5uPcYYsNe1lDxg=
|
||||
go.etcd.io/etcd/client/v3 v3.5.12/go.mod h1:tSbBCakoWmmddL+BKVAJHa9km+O/E+bumDe9mSbPiqw=
|
||||
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
|
||||
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg=
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 h1:jq9TW8u3so/bN+JPT166wjOI6/vQPF6Xe7nMNIltagk=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0/go.mod h1:p8pYQP+m5XfbZm9fxtSKAbM6oIllS7s2AfxrChvc7iw=
|
||||
go.opentelemetry.io/otel v1.24.0 h1:0LAOdjNmQeSTzGBzduGe/rU4tZhMwL5rWgtp9Ku5Jfo=
|
||||
go.opentelemetry.io/otel v1.24.0/go.mod h1:W7b9Ozg4nkF5tWI5zsXkaKKDjdVjpD4oAt9Qi/MArHo=
|
||||
go.opentelemetry.io/otel/metric v1.24.0 h1:6EhoGWWK28x1fbpA4tYTOWBkPefTDQnb8WSGXlc88kI=
|
||||
go.opentelemetry.io/otel/metric v1.24.0/go.mod h1:VYhLe1rFfxuTXLgj4CBiyz+9WYBA8pNGJgDcSFRKBco=
|
||||
go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y1YELI=
|
||||
go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU=
|
||||
go.uber.org/zap v1.21.0 h1:WefMeulhovoZ2sYXz7st6K0sLj7bBhpiFaud4r4zST8=
|
||||
go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw=
|
||||
golang.org/x/crypto v0.21.0 h1:X31++rzVUdKhX5sWmSOFZxx8UW/ldWx55cbf08iNAMA=
|
||||
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
|
||||
golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs=
|
||||
golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
|
||||
golang.org/x/oauth2 v0.18.0 h1:09qnuIAgzdx1XplqJvW6CQqMCtGZykZWcXzPMPUusvI=
|
||||
golang.org/x/oauth2 v0.18.0/go.mod h1:Wf7knwG0MPoWIMMBgFlEaSUDaKskp0dCfrlJRJXbBi8=
|
||||
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
|
||||
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
|
||||
golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
|
||||
golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ=
|
||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
|
||||
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk=
|
||||
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
|
||||
google.golang.org/api v0.171.0 h1:w174hnBPqut76FzW5Qaupt7zY8Kql6fiVjgys4f58sU=
|
||||
google.golang.org/api v0.171.0/go.mod h1:Hnq5AHm4OTMt2BUVjael2CWZFD6vksJdWCWiUAmjC9o=
|
||||
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
|
||||
google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
|
||||
google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9 h1:9+tzLLstTlPTRyJTh+ah5wIMsBW5c4tQwGTN3thOW9Y=
|
||||
google.golang.org/genproto v0.0.0-20240213162025-012b6fc9bca9/go.mod h1:mqHbVIp48Muh7Ywss/AD6I5kNVKZMmAa/QEW58Gxp2s=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20240311132316-a219d84964c2 h1:rIo7ocm2roD9DcFIX67Ym8icoGCKSARAiPljFhh5suQ=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20240311132316-a219d84964c2/go.mod h1:O1cOfN1Cy6QEYr7VxtjOyP5AdAuR0aJ/MYZaaof623Y=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240314234333-6e1732d8331c h1:lfpJ/2rWPa/kJgxyyXM8PrNnfCzcmxJ265mADgwmvLI=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20240314234333-6e1732d8331c/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY=
|
||||
google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk=
|
||||
google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE=
|
||||
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
|
||||
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
86
install.bat
@@ -46,17 +46,23 @@ echo.
|
||||
echo codeagent-wrapper installed successfully at:
|
||||
echo %DEST%
|
||||
|
||||
rem Automatically ensure %USERPROFILE%\bin is in the USER (HKCU) PATH
|
||||
rem Ensure %USERPROFILE%\bin is in PATH without duplicating entries
|
||||
rem 1) Read current user PATH from registry (REG_SZ or REG_EXPAND_SZ)
|
||||
set "USER_PATH_RAW="
|
||||
set "USER_PATH_TYPE="
|
||||
for /f "tokens=1,2,*" %%A in ('reg query "HKCU\Environment" /v Path 2^>nul ^| findstr /I /R "^ *Path *REG_"') do (
|
||||
set "USER_PATH_TYPE=%%B"
|
||||
set "USER_PATH_RAW=%%C"
|
||||
)
|
||||
rem Trim leading spaces from USER_PATH_RAW
|
||||
for /f "tokens=* delims= " %%D in ("!USER_PATH_RAW!") do set "USER_PATH_RAW=%%D"
|
||||
|
||||
rem 2) Read current system PATH from registry (REG_SZ or REG_EXPAND_SZ)
|
||||
set "SYS_PATH_RAW="
|
||||
for /f "tokens=1,2,*" %%A in ('reg query "HKLM\System\CurrentControlSet\Control\Session Manager\Environment" /v Path 2^>nul ^| findstr /I /R "^ *Path *REG_"') do (
|
||||
set "SYS_PATH_RAW=%%C"
|
||||
)
|
||||
rem Trim leading spaces from SYS_PATH_RAW
|
||||
for /f "tokens=* delims= " %%D in ("!SYS_PATH_RAW!") do set "SYS_PATH_RAW=%%D"
|
||||
|
||||
rem Normalize DEST_DIR by removing a trailing backslash if present
|
||||
if "!DEST_DIR:~-1!"=="\" set "DEST_DIR=!DEST_DIR:~0,-1!"
|
||||
|
||||
@@ -67,42 +73,70 @@ set "SEARCH_EXP2=;!DEST_DIR!\;"
|
||||
set "SEARCH_LIT=;!PCT!USERPROFILE!PCT!\bin;"
|
||||
set "SEARCH_LIT2=;!PCT!USERPROFILE!PCT!\bin\;"
|
||||
|
||||
rem Prepare user PATH variants for containment tests
|
||||
set "CHECK_RAW=;!USER_PATH_RAW!;"
|
||||
set "USER_PATH_EXP=!USER_PATH_RAW!"
|
||||
if defined USER_PATH_EXP call set "USER_PATH_EXP=%%USER_PATH_EXP%%"
|
||||
set "CHECK_EXP=;!USER_PATH_EXP!;"
|
||||
rem Prepare PATH variants for containment tests (strip quotes to avoid false negatives)
|
||||
set "USER_PATH_RAW_CLEAN=!USER_PATH_RAW:"=!"
|
||||
set "SYS_PATH_RAW_CLEAN=!SYS_PATH_RAW:"=!"
|
||||
|
||||
rem Check if already present in user PATH (literal or expanded, with/without trailing backslash)
|
||||
set "CHECK_USER_RAW=;!USER_PATH_RAW_CLEAN!;"
|
||||
set "USER_PATH_EXP=!USER_PATH_RAW_CLEAN!"
|
||||
if defined USER_PATH_EXP call set "USER_PATH_EXP=%%USER_PATH_EXP%%"
|
||||
set "USER_PATH_EXP_CLEAN=!USER_PATH_EXP:"=!"
|
||||
set "CHECK_USER_EXP=;!USER_PATH_EXP_CLEAN!;"
|
||||
|
||||
set "CHECK_SYS_RAW=;!SYS_PATH_RAW_CLEAN!;"
|
||||
set "SYS_PATH_EXP=!SYS_PATH_RAW_CLEAN!"
|
||||
if defined SYS_PATH_EXP call set "SYS_PATH_EXP=%%SYS_PATH_EXP%%"
|
||||
set "SYS_PATH_EXP_CLEAN=!SYS_PATH_EXP:"=!"
|
||||
set "CHECK_SYS_EXP=;!SYS_PATH_EXP_CLEAN!;"
|
||||
|
||||
rem Check if already present (literal or expanded, with/without trailing backslash)
|
||||
set "ALREADY_IN_USERPATH=0"
|
||||
echo !CHECK_RAW! | findstr /I /C:"!SEARCH_LIT!" /C:"!SEARCH_LIT2!" >nul && set "ALREADY_IN_USERPATH=1"
|
||||
echo(!CHECK_USER_RAW! | findstr /I /C:"!SEARCH_LIT!" /C:"!SEARCH_LIT2!" >nul && set "ALREADY_IN_USERPATH=1"
|
||||
if "!ALREADY_IN_USERPATH!"=="0" (
|
||||
echo !CHECK_EXP! | findstr /I /C:"!SEARCH_EXP!" /C:"!SEARCH_EXP2!" >nul && set "ALREADY_IN_USERPATH=1"
|
||||
echo(!CHECK_USER_EXP! | findstr /I /C:"!SEARCH_EXP!" /C:"!SEARCH_EXP2!" >nul && set "ALREADY_IN_USERPATH=1"
|
||||
)
|
||||
|
||||
set "ALREADY_IN_SYSPATH=0"
|
||||
echo(!CHECK_SYS_RAW! | findstr /I /C:"!SEARCH_LIT!" /C:"!SEARCH_LIT2!" >nul && set "ALREADY_IN_SYSPATH=1"
|
||||
if "!ALREADY_IN_SYSPATH!"=="0" (
|
||||
echo(!CHECK_SYS_EXP! | findstr /I /C:"!SEARCH_EXP!" /C:"!SEARCH_EXP2!" >nul && set "ALREADY_IN_SYSPATH=1"
|
||||
)
|
||||
|
||||
if "!ALREADY_IN_USERPATH!"=="1" (
|
||||
echo User PATH already includes %%USERPROFILE%%\bin.
|
||||
) else (
|
||||
rem Not present: append to user PATH using setx without duplicating system PATH
|
||||
if defined USER_PATH_RAW (
|
||||
set "USER_PATH_NEW=!USER_PATH_RAW!"
|
||||
if not "!USER_PATH_NEW:~-1!"==";" set "USER_PATH_NEW=!USER_PATH_NEW!;"
|
||||
set "USER_PATH_NEW=!USER_PATH_NEW!!PCT!USERPROFILE!PCT!\bin"
|
||||
if "!ALREADY_IN_SYSPATH!"=="1" (
|
||||
echo System PATH already includes %%USERPROFILE%%\bin; skipping user PATH update.
|
||||
) else (
|
||||
set "USER_PATH_NEW=!PCT!USERPROFILE!PCT!\bin"
|
||||
)
|
||||
rem Persist update to HKCU\Environment\Path (user scope)
|
||||
setx PATH "!USER_PATH_NEW!" >nul
|
||||
if errorlevel 1 (
|
||||
echo WARNING: Failed to append %%USERPROFILE%%\bin to your user PATH.
|
||||
) else (
|
||||
echo Added %%USERPROFILE%%\bin to your user PATH.
|
||||
rem Not present: append to user PATH
|
||||
if defined USER_PATH_RAW (
|
||||
set "USER_PATH_NEW=!USER_PATH_RAW!"
|
||||
if not "!USER_PATH_NEW:~-1!"==";" set "USER_PATH_NEW=!USER_PATH_NEW!;"
|
||||
set "USER_PATH_NEW=!USER_PATH_NEW!!PCT!USERPROFILE!PCT!\bin"
|
||||
) else (
|
||||
set "USER_PATH_NEW=!PCT!USERPROFILE!PCT!\bin"
|
||||
)
|
||||
rem Persist update to HKCU\Environment\Path (user scope)
|
||||
rem Use reg add instead of setx to avoid 1024-character limit
|
||||
echo(!USER_PATH_NEW! | findstr /C:"\"" /C:"!" >nul
|
||||
if not errorlevel 1 (
|
||||
echo WARNING: Your PATH contains quotes or exclamation marks that may cause issues.
|
||||
echo Skipping automatic PATH update. Please add %%USERPROFILE%%\bin to your PATH manually.
|
||||
) else (
|
||||
reg add "HKCU\Environment" /v Path /t REG_EXPAND_SZ /d "!USER_PATH_NEW!" /f >nul
|
||||
if errorlevel 1 (
|
||||
echo WARNING: Failed to append %%USERPROFILE%%\bin to your user PATH.
|
||||
) else (
|
||||
echo Added %%USERPROFILE%%\bin to your user PATH.
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
rem Update current session PATH so codex-wrapper is immediately available
|
||||
rem Update current session PATH so codeagent-wrapper is immediately available
|
||||
set "CURPATH=;%PATH%;"
|
||||
echo !CURPATH! | findstr /I /C:"!SEARCH_EXP!" /C:"!SEARCH_EXP2!" /C:"!SEARCH_LIT!" /C:"!SEARCH_LIT2!" >nul
|
||||
set "CURPATH_CLEAN=!CURPATH:"=!"
|
||||
echo(!CURPATH_CLEAN! | findstr /I /C:"!SEARCH_EXP!" /C:"!SEARCH_EXP2!" /C:"!SEARCH_LIT!" /C:"!SEARCH_LIT2!" >nul
|
||||
if errorlevel 1 set "PATH=!DEST_DIR!;!PATH!"
|
||||
|
||||
goto :cleanup
|
||||
|
||||
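
The batch logic above reads both the user (HKCU) and system (HKLM) `Path` values, skips the update when `%USERPROFILE%\bin` is already present, and persists the change with `reg add` as `REG_EXPAND_SZ` rather than `setx` to avoid setx's 1024-character truncation. A simplified sketch of the same idempotent update, assuming Windows and Python's standard `winreg` module (unlike the batch script, it checks only the literal entry, not the expanded form):

```python
# Sketch of an idempotent user-PATH update on Windows (illustrative, not part of install.bat).
import winreg

ENTRY = r"%USERPROFILE%\bin"

def _contains(path_value: str, entry: str) -> bool:
    parts = [p.strip().strip('"').rstrip("\\").lower() for p in path_value.split(";") if p.strip()]
    return entry.rstrip("\\").lower() in parts

def add_user_bin_to_path() -> None:
    with winreg.OpenKey(winreg.HKEY_CURRENT_USER, "Environment", 0,
                        winreg.KEY_READ | winreg.KEY_WRITE) as key:
        try:
            current, _ = winreg.QueryValueEx(key, "Path")
        except FileNotFoundError:
            current = ""
        if _contains(current, ENTRY):
            return  # already present: do not duplicate
        new_value = f"{current.rstrip(';')};{ENTRY}" if current else ENTRY
        # REG_EXPAND_SZ keeps %USERPROFILE% unexpanded, like the reg add call in install.bat
        winreg.SetValueEx(key, "Path", 0, winreg.REG_EXPAND_SZ, new_value)
```
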
563
install.py
@@ -23,6 +23,7 @@ except ImportError: # pragma: no cover
|
||||
jsonschema = None
|
||||
|
||||
DEFAULT_INSTALL_DIR = "~/.claude"
|
||||
SETTINGS_FILE = "settings.json"
|
||||
|
||||
|
||||
def _ensure_list(ctx: Dict[str, Any], key: str) -> List[Any]:
|
||||
@@ -46,7 +47,7 @@ def parse_args(argv: Optional[Iterable[str]] = None) -> argparse.Namespace:
|
||||
)
|
||||
parser.add_argument(
|
||||
"--module",
|
||||
help="Comma-separated modules to install, or 'all' for all enabled",
|
||||
help="Comma-separated modules to install/uninstall, or 'all'",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--config",
|
||||
@@ -58,6 +59,16 @@ def parse_args(argv: Optional[Iterable[str]] = None) -> argparse.Namespace:
|
||||
action="store_true",
|
||||
help="List available modules and exit",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--status",
|
||||
action="store_true",
|
||||
help="Show installation status of all modules",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--uninstall",
|
||||
action="store_true",
|
||||
help="Uninstall specified modules",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--force",
|
||||
action="store_true",
|
||||
@@ -81,6 +92,151 @@ def _load_json(path: Path) -> Any:
|
||||
raise ValueError(f"Invalid JSON in {path}: {exc}") from exc
|
||||
|
||||
|
||||
def _save_json(path: Path, data: Any) -> None:
|
||||
"""Save data to JSON file with proper formatting."""
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
with path.open("w", encoding="utf-8") as fh:
|
||||
json.dump(data, fh, indent=2, ensure_ascii=False)
|
||||
fh.write("\n")
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Hooks Management
|
||||
# =============================================================================
|
||||
|
||||
def load_settings(ctx: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Load settings.json from install directory."""
|
||||
settings_path = ctx["install_dir"] / SETTINGS_FILE
|
||||
if settings_path.exists():
|
||||
try:
|
||||
return _load_json(settings_path)
|
||||
except (ValueError, FileNotFoundError):
|
||||
return {}
|
||||
return {}
|
||||
|
||||
|
||||
def save_settings(ctx: Dict[str, Any], settings: Dict[str, Any]) -> None:
|
||||
"""Save settings.json to install directory."""
|
||||
settings_path = ctx["install_dir"] / SETTINGS_FILE
|
||||
_save_json(settings_path, settings)
|
||||
|
||||
|
||||
def find_module_hooks(module_name: str, cfg: Dict[str, Any], ctx: Dict[str, Any]) -> Optional[tuple]:
|
||||
"""Find hooks.json for a module if it exists.
|
||||
|
||||
Returns tuple of (hooks_config, plugin_root_path) or None.
|
||||
"""
|
||||
# Check for hooks in operations (copy_dir targets)
|
||||
for op in cfg.get("operations", []):
|
||||
if op.get("type") == "copy_dir":
|
||||
target_dir = ctx["install_dir"] / op["target"]
|
||||
hooks_file = target_dir / "hooks" / "hooks.json"
|
||||
if hooks_file.exists():
|
||||
try:
|
||||
return (_load_json(hooks_file), str(target_dir))
|
||||
except (ValueError, FileNotFoundError):
|
||||
pass
|
||||
|
||||
# Also check source directory during install
|
||||
for op in cfg.get("operations", []):
|
||||
if op.get("type") == "copy_dir":
|
||||
target_dir = ctx["install_dir"] / op["target"]
|
||||
source_dir = ctx["config_dir"] / op["source"]
|
||||
hooks_file = source_dir / "hooks" / "hooks.json"
|
||||
if hooks_file.exists():
|
||||
try:
|
||||
return (_load_json(hooks_file), str(target_dir))
|
||||
except (ValueError, FileNotFoundError):
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _create_hook_marker(module_name: str) -> str:
|
||||
"""Create a marker to identify hooks from a specific module."""
|
||||
return f"__module:{module_name}__"
|
||||
|
||||
|
||||
def _replace_hook_variables(obj: Any, plugin_root: str) -> Any:
|
||||
"""Recursively replace ${CLAUDE_PLUGIN_ROOT} in hook config."""
|
||||
if isinstance(obj, str):
|
||||
return obj.replace("${CLAUDE_PLUGIN_ROOT}", plugin_root)
|
||||
elif isinstance(obj, dict):
|
||||
return {k: _replace_hook_variables(v, plugin_root) for k, v in obj.items()}
|
||||
elif isinstance(obj, list):
|
||||
return [_replace_hook_variables(item, plugin_root) for item in obj]
|
||||
return obj
|
||||
|
||||
|
||||
def merge_hooks_to_settings(module_name: str, hooks_config: Dict[str, Any], ctx: Dict[str, Any], plugin_root: str = "") -> None:
|
||||
"""Merge module hooks into settings.json."""
|
||||
settings = load_settings(ctx)
|
||||
settings.setdefault("hooks", {})
|
||||
|
||||
module_hooks = hooks_config.get("hooks", {})
|
||||
marker = _create_hook_marker(module_name)
|
||||
|
||||
# Replace ${CLAUDE_PLUGIN_ROOT} with actual path
|
||||
if plugin_root:
|
||||
module_hooks = _replace_hook_variables(module_hooks, plugin_root)
|
||||
|
||||
for hook_type, hook_entries in module_hooks.items():
|
||||
settings["hooks"].setdefault(hook_type, [])
|
||||
|
||||
for entry in hook_entries:
|
||||
# Add marker to identify this hook's source module
|
||||
entry_copy = dict(entry)
|
||||
entry_copy["__module__"] = module_name
|
||||
|
||||
# Check if already exists (avoid duplicates)
|
||||
exists = False
|
||||
for existing in settings["hooks"][hook_type]:
|
||||
if existing.get("__module__") == module_name:
|
||||
# Same module, check if same hook
|
||||
if _hooks_equal(existing, entry_copy):
|
||||
exists = True
|
||||
break
|
||||
|
||||
if not exists:
|
||||
settings["hooks"][hook_type].append(entry_copy)
|
||||
|
||||
save_settings(ctx, settings)
|
||||
write_log({"level": "INFO", "message": f"Merged hooks for module: {module_name}"}, ctx)
|
||||
|
||||
|
||||
def unmerge_hooks_from_settings(module_name: str, ctx: Dict[str, Any]) -> None:
|
||||
"""Remove module hooks from settings.json."""
|
||||
settings = load_settings(ctx)
|
||||
|
||||
if "hooks" not in settings:
|
||||
return
|
||||
|
||||
modified = False
|
||||
for hook_type in list(settings["hooks"].keys()):
|
||||
original_len = len(settings["hooks"][hook_type])
|
||||
settings["hooks"][hook_type] = [
|
||||
entry for entry in settings["hooks"][hook_type]
|
||||
if entry.get("__module__") != module_name
|
||||
]
|
||||
if len(settings["hooks"][hook_type]) < original_len:
|
||||
modified = True
|
||||
|
||||
# Remove empty hook type arrays
|
||||
if not settings["hooks"][hook_type]:
|
||||
del settings["hooks"][hook_type]
|
||||
|
||||
if modified:
|
||||
save_settings(ctx, settings)
|
||||
write_log({"level": "INFO", "message": f"Removed hooks for module: {module_name}"}, ctx)
|
||||
|
||||
|
||||
def _hooks_equal(hook1: Dict[str, Any], hook2: Dict[str, Any]) -> bool:
|
||||
"""Compare two hooks ignoring the __module__ marker."""
|
||||
h1 = {k: v for k, v in hook1.items() if k != "__module__"}
|
||||
h2 = {k: v for k, v in hook2.items() if k != "__module__"}
|
||||
return h1 == h2
|
||||
|
||||
|
||||
def load_config(path: str) -> Dict[str, Any]:
|
||||
"""Load config and validate against JSON Schema.
|
||||
|
||||
@@ -166,22 +322,93 @@ def resolve_paths(config: Dict[str, Any], args: argparse.Namespace) -> Dict[str,
|
||||
|
||||
def list_modules(config: Dict[str, Any]) -> None:
|
||||
print("Available Modules:")
|
||||
print(f"{'Name':<15} {'Default':<8} Description")
|
||||
print("-" * 60)
|
||||
for name, cfg in config.get("modules", {}).items():
|
||||
print(f"{'#':<3} {'Name':<15} {'Default':<8} Description")
|
||||
print("-" * 65)
|
||||
for idx, (name, cfg) in enumerate(config.get("modules", {}).items(), 1):
|
||||
default = "✓" if cfg.get("enabled", False) else "✗"
|
||||
desc = cfg.get("description", "")
|
||||
print(f"{name:<15} {default:<8} {desc}")
|
||||
print(f"{idx:<3} {name:<15} {default:<8} {desc}")
|
||||
print("\n✓ = installed by default when no --module specified")
|
||||
|
||||
|
||||
def load_installed_status(ctx: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Load installed modules status from status file."""
|
||||
status_path = Path(ctx["status_file"])
|
||||
if status_path.exists():
|
||||
try:
|
||||
return _load_json(status_path)
|
||||
except (ValueError, FileNotFoundError):
|
||||
return {"modules": {}}
|
||||
return {"modules": {}}
|
||||
|
||||
|
||||
def check_module_installed(name: str, cfg: Dict[str, Any], ctx: Dict[str, Any]) -> bool:
|
||||
"""Check if a module is installed by verifying its files exist."""
|
||||
install_dir = ctx["install_dir"]
|
||||
|
||||
for op in cfg.get("operations", []):
|
||||
op_type = op.get("type")
|
||||
if op_type in ("copy_dir", "copy_file"):
|
||||
target = (install_dir / op["target"]).expanduser().resolve()
|
||||
if target.exists():
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def get_installed_modules(config: Dict[str, Any], ctx: Dict[str, Any]) -> Dict[str, bool]:
|
||||
"""Get installation status of all modules by checking files."""
|
||||
result = {}
|
||||
modules = config.get("modules", {})
|
||||
|
||||
# First check status file
|
||||
status = load_installed_status(ctx)
|
||||
status_modules = status.get("modules", {})
|
||||
|
||||
for name, cfg in modules.items():
|
||||
# Check both status file and filesystem
|
||||
in_status = name in status_modules
|
||||
files_exist = check_module_installed(name, cfg, ctx)
|
||||
result[name] = in_status or files_exist
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def list_modules_with_status(config: Dict[str, Any], ctx: Dict[str, Any]) -> None:
|
||||
"""List modules with installation status."""
|
||||
installed_status = get_installed_modules(config, ctx)
|
||||
status_data = load_installed_status(ctx)
|
||||
status_modules = status_data.get("modules", {})
|
||||
|
||||
print("\n" + "=" * 70)
|
||||
print("Module Status")
|
||||
print("=" * 70)
|
||||
print(f"{'#':<3} {'Name':<15} {'Status':<15} {'Installed At':<20} Description")
|
||||
print("-" * 70)
|
||||
|
||||
for idx, (name, cfg) in enumerate(config.get("modules", {}).items(), 1):
|
||||
desc = cfg.get("description", "")[:25]
|
||||
if installed_status.get(name, False):
|
||||
status = "✅ Installed"
|
||||
installed_at = status_modules.get(name, {}).get("installed_at", "")[:16]
|
||||
else:
|
||||
status = "⬚ Not installed"
|
||||
installed_at = ""
|
||||
print(f"{idx:<3} {name:<15} {status:<15} {installed_at:<20} {desc}")
|
||||
|
||||
total = len(config.get("modules", {}))
|
||||
installed_count = sum(1 for v in installed_status.values() if v)
|
||||
print(f"\nTotal: {installed_count}/{total} modules installed")
|
||||
print(f"Install dir: {ctx['install_dir']}")
|
||||
|
||||
|
||||
def select_modules(config: Dict[str, Any], module_arg: Optional[str]) -> Dict[str, Any]:
|
||||
modules = config.get("modules", {})
|
||||
if not module_arg:
|
||||
return {k: v for k, v in modules.items() if v.get("enabled", False)}
|
||||
# No --module specified: show interactive selection
|
||||
return interactive_select_modules(config)
|
||||
|
||||
if module_arg.strip().lower() == "all":
|
||||
return {k: v for k, v in modules.items() if v.get("enabled", False)}
|
||||
return dict(modules.items())
|
||||
|
||||
selected: Dict[str, Any] = {}
|
||||
for name in (part.strip() for part in module_arg.split(",")):
|
||||
@@ -193,6 +420,263 @@ def select_modules(config: Dict[str, Any], module_arg: Optional[str]) -> Dict[st
|
||||
return selected
|
||||
|
||||
|
||||
def interactive_select_modules(config: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Interactive module selection when no --module is specified."""
|
||||
modules = config.get("modules", {})
|
||||
module_names = list(modules.keys())
|
||||
|
||||
print("\n" + "=" * 65)
|
||||
print("Welcome to Claude Plugin Installer")
|
||||
print("=" * 65)
|
||||
print("\nNo modules specified. Please select modules to install:\n")
|
||||
|
||||
list_modules(config)
|
||||
|
||||
print("\nEnter module numbers or names (comma-separated), or:")
|
||||
print(" 'all' - Install all modules")
|
||||
print(" 'q' - Quit without installing")
|
||||
print()
|
||||
|
||||
while True:
|
||||
try:
|
||||
user_input = input("Select modules: ").strip()
|
||||
except (EOFError, KeyboardInterrupt):
|
||||
print("\nInstallation cancelled.")
|
||||
sys.exit(0)
|
||||
|
||||
if not user_input:
|
||||
print("No input. Please enter module numbers, names, 'all', or 'q'.")
|
||||
continue
|
||||
|
||||
if user_input.lower() == "q":
|
||||
print("Installation cancelled.")
|
||||
sys.exit(0)
|
||||
|
||||
if user_input.lower() == "all":
|
||||
print(f"\nSelected all {len(modules)} modules.")
|
||||
return dict(modules.items())
|
||||
|
||||
# Parse selection
|
||||
selected: Dict[str, Any] = {}
|
||||
parts = [p.strip() for p in user_input.replace(" ", ",").split(",") if p.strip()]
|
||||
|
||||
try:
|
||||
for part in parts:
|
||||
# Try as number first
|
||||
if part.isdigit():
|
||||
idx = int(part) - 1
|
||||
if 0 <= idx < len(module_names):
|
||||
name = module_names[idx]
|
||||
selected[name] = modules[name]
|
||||
else:
|
||||
print(f"Invalid number: {part}. Valid range: 1-{len(module_names)}")
|
||||
selected = {}
|
||||
break
|
||||
# Try as name
|
||||
elif part in modules:
|
||||
selected[part] = modules[part]
|
||||
else:
|
||||
print(f"Module not found: '{part}'")
|
||||
selected = {}
|
||||
break
|
||||
|
||||
if selected:
|
||||
names = ", ".join(selected.keys())
|
||||
print(f"\nSelected {len(selected)} module(s): {names}")
|
||||
return selected
|
||||
|
||||
except ValueError:
|
||||
print("Invalid input. Please try again.")
|
||||
continue
|
||||
|
||||
|
||||
def uninstall_module(name: str, cfg: Dict[str, Any], ctx: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Uninstall a module by removing its files and hooks."""
|
||||
result: Dict[str, Any] = {
|
||||
"module": name,
|
||||
"status": "success",
|
||||
"uninstalled_at": datetime.now().isoformat(),
|
||||
}
|
||||
|
||||
install_dir = ctx["install_dir"]
|
||||
removed_paths = []
|
||||
|
||||
for op in cfg.get("operations", []):
|
||||
op_type = op.get("type")
|
||||
try:
|
||||
if op_type in ("copy_dir", "copy_file"):
|
||||
target = (install_dir / op["target"]).expanduser().resolve()
|
||||
if target.exists():
|
||||
if target.is_dir():
|
||||
shutil.rmtree(target)
|
||||
else:
|
||||
target.unlink()
|
||||
removed_paths.append(str(target))
|
||||
write_log({"level": "INFO", "message": f"Removed: {target}"}, ctx)
|
||||
# merge_dir and merge_json are harder to uninstall cleanly, skip
|
||||
except Exception as exc:
|
||||
write_log({"level": "WARNING", "message": f"Failed to remove {op.get('target', 'unknown')}: {exc}"}, ctx)
|
||||
|
||||
# Remove module hooks from settings.json
|
||||
try:
|
||||
unmerge_hooks_from_settings(name, ctx)
|
||||
result["hooks_removed"] = True
|
||||
except Exception as exc:
|
||||
write_log({"level": "WARNING", "message": f"Failed to remove hooks for {name}: {exc}"}, ctx)
|
||||
|
||||
result["removed_paths"] = removed_paths
|
||||
return result
|
||||
|
||||
|
||||
def update_status_after_uninstall(uninstalled_modules: List[str], ctx: Dict[str, Any]) -> None:
|
||||
"""Remove uninstalled modules from status file."""
|
||||
status = load_installed_status(ctx)
|
||||
modules = status.get("modules", {})
|
||||
|
||||
for name in uninstalled_modules:
|
||||
if name in modules:
|
||||
del modules[name]
|
||||
|
||||
status["modules"] = modules
|
||||
status["updated_at"] = datetime.now().isoformat()
|
||||
|
||||
status_path = Path(ctx["status_file"])
|
||||
with status_path.open("w", encoding="utf-8") as fh:
|
||||
json.dump(status, fh, indent=2, ensure_ascii=False)
|
||||
|
||||
|
||||
def interactive_manage(config: Dict[str, Any], ctx: Dict[str, Any]) -> int:
|
||||
"""Interactive module management menu."""
|
||||
while True:
|
||||
installed_status = get_installed_modules(config, ctx)
|
||||
modules = config.get("modules", {})
|
||||
module_names = list(modules.keys())
|
||||
|
||||
print("\n" + "=" * 70)
|
||||
print("Claude Plugin Manager")
|
||||
print("=" * 70)
|
||||
print(f"{'#':<3} {'Name':<15} {'Status':<15} Description")
|
||||
print("-" * 70)
|
||||
|
||||
for idx, (name, cfg) in enumerate(modules.items(), 1):
|
||||
desc = cfg.get("description", "")[:30]
|
||||
if installed_status.get(name, False):
|
||||
status = "✅ Installed"
|
||||
else:
|
||||
status = "⬚ Not installed"
|
||||
print(f"{idx:<3} {name:<15} {status:<15} {desc}")
|
||||
|
||||
total = len(modules)
|
||||
installed_count = sum(1 for v in installed_status.values() if v)
|
||||
print(f"\nInstalled: {installed_count}/{total} | Dir: {ctx['install_dir']}")
|
||||
|
||||
print("\nCommands:")
|
||||
print(" i <num/name> - Install module(s)")
|
||||
print(" u <num/name> - Uninstall module(s)")
|
||||
print(" q - Quit")
|
||||
print()
|
||||
|
||||
try:
|
||||
user_input = input("Enter command: ").strip()
|
||||
except (EOFError, KeyboardInterrupt):
|
||||
print("\nExiting.")
|
||||
return 0
|
||||
|
||||
if not user_input:
|
||||
continue
|
||||
|
||||
if user_input.lower() == "q":
|
||||
print("Goodbye!")
|
||||
return 0
|
||||
|
||||
parts = user_input.split(maxsplit=1)
|
||||
cmd = parts[0].lower()
|
||||
args = parts[1] if len(parts) > 1 else ""
|
||||
|
||||
if cmd == "i":
|
||||
# Install
|
||||
selected = _parse_module_selection(args, modules, module_names)
|
||||
if selected:
|
||||
# Filter out already installed
|
||||
to_install = {k: v for k, v in selected.items() if not installed_status.get(k, False)}
|
||||
if not to_install:
|
||||
print("All selected modules are already installed.")
|
||||
continue
|
||||
print(f"\nInstalling: {', '.join(to_install.keys())}")
|
||||
results = []
|
||||
for name, cfg in to_install.items():
|
||||
try:
|
||||
results.append(execute_module(name, cfg, ctx))
|
||||
print(f" ✓ {name} installed")
|
||||
except Exception as exc:
|
||||
print(f" ✗ {name} failed: {exc}")
|
||||
# Update status
|
||||
current_status = load_installed_status(ctx)
|
||||
for r in results:
|
||||
if r.get("status") == "success":
|
||||
current_status.setdefault("modules", {})[r["module"]] = r
|
||||
current_status["updated_at"] = datetime.now().isoformat()
|
||||
with Path(ctx["status_file"]).open("w", encoding="utf-8") as fh:
|
||||
json.dump(current_status, fh, indent=2, ensure_ascii=False)
|
||||
|
||||
elif cmd == "u":
|
||||
# Uninstall
|
||||
selected = _parse_module_selection(args, modules, module_names)
|
||||
if selected:
|
||||
# Filter to only installed ones
|
||||
to_uninstall = {k: v for k, v in selected.items() if installed_status.get(k, False)}
|
||||
if not to_uninstall:
|
||||
print("None of the selected modules are installed.")
|
||||
continue
|
||||
print(f"\nUninstalling: {', '.join(to_uninstall.keys())}")
|
||||
confirm = input("Confirm? (y/N): ").strip().lower()
|
||||
if confirm != "y":
|
||||
print("Cancelled.")
|
||||
continue
|
||||
for name, cfg in to_uninstall.items():
|
||||
try:
|
||||
uninstall_module(name, cfg, ctx)
|
||||
print(f" ✓ {name} uninstalled")
|
||||
except Exception as exc:
|
||||
print(f" ✗ {name} failed: {exc}")
|
||||
update_status_after_uninstall(list(to_uninstall.keys()), ctx)
|
||||
|
||||
else:
|
||||
print(f"Unknown command: {cmd}. Use 'i', 'u', or 'q'.")
|
||||
|
||||
|
||||
def _parse_module_selection(
|
||||
args: str, modules: Dict[str, Any], module_names: List[str]
|
||||
) -> Dict[str, Any]:
|
||||
"""Parse module selection from user input."""
|
||||
if not args:
|
||||
print("Please specify module number(s) or name(s).")
|
||||
return {}
|
||||
|
||||
if args.lower() == "all":
|
||||
return dict(modules.items())
|
||||
|
||||
selected: Dict[str, Any] = {}
|
||||
parts = [p.strip() for p in args.replace(",", " ").split() if p.strip()]
|
||||
|
||||
for part in parts:
|
||||
if part.isdigit():
|
||||
idx = int(part) - 1
|
||||
if 0 <= idx < len(module_names):
|
||||
name = module_names[idx]
|
||||
selected[name] = modules[name]
|
||||
else:
|
||||
print(f"Invalid number: {part}")
|
||||
return {}
|
||||
elif part in modules:
|
||||
selected[part] = modules[part]
|
||||
else:
|
||||
print(f"Module not found: '{part}'")
|
||||
return {}
|
||||
|
||||
return selected
|
||||
|
||||
|
||||
def ensure_install_dir(path: Path) -> None:
|
||||
path = Path(path)
|
||||
if path.exists() and not path.is_dir():
|
||||
@@ -241,6 +725,18 @@ def execute_module(name: str, cfg: Dict[str, Any], ctx: Dict[str, Any]) -> Dict[
|
||||
)
|
||||
raise
|
||||
|
||||
# Handle hooks: find and merge module hooks into settings.json
|
||||
hooks_result = find_module_hooks(name, cfg, ctx)
|
||||
if hooks_result:
|
||||
hooks_config, plugin_root = hooks_result
|
||||
try:
|
||||
merge_hooks_to_settings(name, hooks_config, ctx, plugin_root)
|
||||
result["operations"].append({"type": "merge_hooks", "status": "success"})
|
||||
result["has_hooks"] = True
|
||||
except Exception as exc:
|
||||
write_log({"level": "WARNING", "message": f"Failed to merge hooks for {name}: {exc}"}, ctx)
|
||||
result["operations"].append({"type": "merge_hooks", "status": "failed", "error": str(exc)})
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@@ -529,10 +1025,54 @@ def main(argv: Optional[Iterable[str]] = None) -> int:
|
||||
|
||||
ctx = resolve_paths(config, args)
|
||||
|
||||
# Handle --list-modules
|
||||
if getattr(args, "list_modules", False):
|
||||
list_modules(config)
|
||||
return 0
|
||||
|
||||
# Handle --status
|
||||
if getattr(args, "status", False):
|
||||
list_modules_with_status(config, ctx)
|
||||
return 0
|
||||
|
||||
# Handle --uninstall
|
||||
if getattr(args, "uninstall", False):
|
||||
if not args.module:
|
||||
print("Error: --uninstall requires --module to specify which modules to uninstall")
|
||||
return 1
|
||||
modules = config.get("modules", {})
|
||||
installed = load_installed_status(ctx)
|
||||
installed_modules = installed.get("modules", {})
|
||||
|
||||
selected = select_modules(config, args.module)
|
||||
to_uninstall = {k: v for k, v in selected.items() if k in installed_modules}
|
||||
|
||||
if not to_uninstall:
|
||||
print("None of the specified modules are installed.")
|
||||
return 0
|
||||
|
||||
print(f"Uninstalling {len(to_uninstall)} module(s): {', '.join(to_uninstall.keys())}")
|
||||
for name, cfg in to_uninstall.items():
|
||||
try:
|
||||
uninstall_module(name, cfg, ctx)
|
||||
print(f" ✓ {name} uninstalled")
|
||||
except Exception as exc:
|
||||
print(f" ✗ {name} failed: {exc}", file=sys.stderr)
|
||||
|
||||
update_status_after_uninstall(list(to_uninstall.keys()), ctx)
|
||||
print(f"\n✓ Uninstall complete")
|
||||
return 0
|
||||
|
||||
# No --module specified: enter interactive management mode
|
||||
if not args.module:
|
||||
try:
|
||||
ensure_install_dir(ctx["install_dir"])
|
||||
except Exception as exc:
|
||||
print(f"Failed to prepare install dir: {exc}", file=sys.stderr)
|
||||
return 1
|
||||
return interactive_manage(config, ctx)
|
||||
|
||||
# Install specified modules
|
||||
modules = select_modules(config, args.module)
|
||||
|
||||
try:
|
||||
@@ -568,7 +1108,14 @@ def main(argv: Optional[Iterable[str]] = None) -> int:
|
||||
)
|
||||
break
|
||||
|
||||
write_status(results, ctx)
|
||||
# Merge with existing status
|
||||
current_status = load_installed_status(ctx)
|
||||
for r in results:
|
||||
if r.get("status") == "success":
|
||||
current_status.setdefault("modules", {})[r["module"]] = r
|
||||
current_status["updated_at"] = datetime.now().isoformat()
|
||||
with Path(ctx["status_file"]).open("w", encoding="utf-8") as fh:
|
||||
json.dump(current_status, fh, indent=2, ensure_ascii=False)
|
||||
|
||||
# Summary
|
||||
success = sum(1 for r in results if r.get("status") == "success")
|
||||
Some files were not shown because too many files have changed in this diff.